From 01789a008119b6a13841250d05d3e0ca13e792e6 Mon Sep 17 00:00:00 2001
From: jzstark
Date: Thu, 7 Mar 2024 19:40:59 +0000
Subject: [PATCH] deploy: 76de9fd3adcdc65add01e85ce08cd62f9f6c0ef1

---
 fonts/KaTeX_AMS-Regular.woff2 | Bin 28076 -> 0 bytes
 fonts/KaTeX_Caligraphic-Bold.woff2 | Bin 6912 -> 0 bytes
 fonts/KaTeX_Caligraphic-Regular.woff2 | Bin 6908 -> 0 bytes
 fonts/KaTeX_Fraktur-Bold.woff2 | Bin 11348 -> 0 bytes
 fonts/KaTeX_Fraktur-Regular.woff2 | Bin 11316 -> 0 bytes
 fonts/KaTeX_Main-Bold.woff2 | Bin 25324 -> 0 bytes
 fonts/KaTeX_Main-BoldItalic.woff2 | Bin 16780 -> 0 bytes
 fonts/KaTeX_Main-Italic.woff2 | Bin 16988 -> 0 bytes
 fonts/KaTeX_Main-Regular.woff2 | Bin 26272 -> 0 bytes
 fonts/KaTeX_Math-BoldItalic.woff2 | Bin 16400 -> 0 bytes
 fonts/KaTeX_Math-Italic.woff2 | Bin 16440 -> 0 bytes
 fonts/KaTeX_SansSerif-Bold.woff2 | Bin 12216 -> 0 bytes
 fonts/KaTeX_SansSerif-Italic.woff2 | Bin 12028 -> 0 bytes
 fonts/KaTeX_SansSerif-Regular.woff2 | Bin 10344 -> 0 bytes
 fonts/KaTeX_Script-Regular.woff2 | Bin 9644 -> 0 bytes
 fonts/KaTeX_Size1-Regular.woff2 | Bin 5468 -> 0 bytes
 fonts/KaTeX_Size2-Regular.woff2 | Bin 5208 -> 0 bytes
 fonts/KaTeX_Size3-Regular.woff2 | Bin 3624 -> 0 bytes
 fonts/KaTeX_Size4-Regular.woff2 | Bin 4928 -> 0 bytes
 fonts/KaTeX_Typewriter-Regular.woff2 | Bin 13568 -> 0 bytes
 katex.min.css | 1 -
 katex.min.js | 1 -
 mathjax.js | 1 -
 odoc.css | 794 -----------------
 [diffstat truncated: the remaining entries cover the auto-generated odoc HTML pages and .dummy placeholder files under owl-base/, owl-top/, and owl/]
----- owl/Owl_algodiff/.dummy | 0 owl/Owl_algodiff/D/A/Linalg/index.html | 6 - owl/Owl_algodiff/D/A/Mat/index.html | 2 - owl/Owl_algodiff/D/A/Scalar/index.html | 2 - owl/Owl_algodiff/D/A/index.html | 158 ---- owl/Owl_algodiff/D/Arr/index.html | 2 - owl/Owl_algodiff/D/Builder/index.html | 2 - .../D/Builder/module-type-Aiso/index.html | 2 - .../D/Builder/module-type-Piso/index.html | 2 - .../D/Builder/module-type-Siao/index.html | 2 - .../D/Builder/module-type-Sipo/index.html | 7 - .../D/Builder/module-type-Siso/index.html | 2 - .../D/Builder/module-type-Sito/index.html | 7 - owl/Owl_algodiff/D/Linalg/index.html | 6 - owl/Owl_algodiff/D/Mat/index.html | 2 - owl/Owl_algodiff/D/Maths/index.html | 2 - owl/Owl_algodiff/D/NN/index.html | 20 - owl/Owl_algodiff/D/index.html | 2 - owl/Owl_algodiff/S/A/Linalg/index.html | 6 - owl/Owl_algodiff/S/A/Mat/index.html | 2 - owl/Owl_algodiff/S/A/Scalar/index.html | 2 - owl/Owl_algodiff/S/A/index.html | 158 ---- owl/Owl_algodiff/S/Arr/index.html | 2 - owl/Owl_algodiff/S/Builder/index.html | 2 - .../S/Builder/module-type-Aiso/index.html | 2 - .../S/Builder/module-type-Piso/index.html | 2 - .../S/Builder/module-type-Siao/index.html | 2 - .../S/Builder/module-type-Sipo/index.html | 7 - .../S/Builder/module-type-Siso/index.html | 2 - .../S/Builder/module-type-Sito/index.html | 7 - owl/Owl_algodiff/S/Linalg/index.html | 6 - owl/Owl_algodiff/S/Mat/index.html | 2 - owl/Owl_algodiff/S/Maths/index.html | 2 - owl/Owl_algodiff/S/NN/index.html | 20 - owl/Owl_algodiff/S/index.html | 2 - owl/Owl_algodiff_primal_ops/.dummy | 0 .../D/Linalg/index.html | 25 - owl/Owl_algodiff_primal_ops/D/Mat/index.html | 2 - owl/Owl_algodiff_primal_ops/D/index.html | 579 ------------- .../S/Linalg/index.html | 25 - owl/Owl_algodiff_primal_ops/S/Mat/index.html | 2 - owl/Owl_algodiff_primal_ops/S/index.html | 579 ------------- owl/Owl_cblas/.dummy | 0 owl/Owl_cblas_basic/.dummy | 0 owl/Owl_cblas_generated/.dummy | 0 owl/Owl_cluster/.dummy | 0 owl/Owl_core_types/.dummy | 0 owl/Owl_dataset/.dummy | 0 owl/Owl_dense/.dummy | 0 owl/Owl_dense_matrix/.dummy | 0 owl/Owl_dense_matrix/C/index.html | 215 ----- owl/Owl_dense_matrix/D/index.html | 225 ----- owl/Owl_dense_matrix/Generic/index.html | 402 --------- owl/Owl_dense_matrix/Operator/index.html | 180 ---- owl/Owl_dense_matrix/S/index.html | 225 ----- owl/Owl_dense_matrix/Z/index.html | 215 ----- owl/Owl_dense_matrix_c/.dummy | 0 owl/Owl_dense_matrix_d/.dummy | 0 owl/Owl_dense_matrix_generic/.dummy | 0 owl/Owl_dense_matrix_intf/.dummy | 0 .../module-type-Common/index.html | 34 - .../module-type-Complex/index.html | 2 - .../module-type-Real/index.html | 12 - owl/Owl_dense_matrix_s/.dummy | 0 owl/Owl_dense_matrix_z/.dummy | 0 owl/Owl_dense_ndarray/.dummy | 0 owl/Owl_dense_ndarray/Any/index.html | 6 - owl/Owl_dense_ndarray/C/index.html | 582 ------------- owl/Owl_dense_ndarray/D/index.html | 579 ------------- owl/Owl_dense_ndarray/Generic/index.html | 805 ------------------ owl/Owl_dense_ndarray/Operator/index.html | 171 ---- owl/Owl_dense_ndarray/S/index.html | 579 ------------- owl/Owl_dense_ndarray/Z/index.html | 582 ------------- owl/Owl_dense_ndarray_a/.dummy | 0 owl/Owl_dense_ndarray_c/.dummy | 0 owl/Owl_dense_ndarray_d/.dummy | 0 owl/Owl_dense_ndarray_generic/.dummy | 0 owl/Owl_dense_ndarray_intf/.dummy | 0 .../module-type-Common/index.html | 34 - .../module-type-Complex/index.html | 2 - .../module-type-Distribution/index.html | 2 - .../module-type-NN/index.html | 372 -------- .../module-type-Real/index.html | 2 - owl/Owl_dense_ndarray_s/.dummy 
| 0 owl/Owl_dense_ndarray_z/.dummy | 0 owl/Owl_distribution/.dummy | 0 owl/Owl_distribution/Make/Beta/index.html | 2 - owl/Owl_distribution/Make/Cauchy/index.html | 2 - owl/Owl_distribution/Make/Chi2/index.html | 2 - .../Make/Exponential/index.html | 2 - owl/Owl_distribution/Make/F/index.html | 2 - owl/Owl_distribution/Make/Gamma/index.html | 2 - owl/Owl_distribution/Make/Gaussian/index.html | 2 - owl/Owl_distribution/Make/Gumbel1/index.html | 2 - owl/Owl_distribution/Make/Gumbel2/index.html | 2 - owl/Owl_distribution/Make/Laplace/index.html | 2 - owl/Owl_distribution/Make/Logistic/index.html | 2 - .../Make/Lognormal/index.html | 2 - owl/Owl_distribution/Make/Lomax/index.html | 2 - owl/Owl_distribution/Make/Poisson/index.html | 2 - owl/Owl_distribution/Make/Rayleigh/index.html | 2 - owl/Owl_distribution/Make/Uniform/index.html | 2 - owl/Owl_distribution/Make/Weibull/index.html | 2 - .../Make/argument-1-A/Linalg/index.html | 6 - .../Make/argument-1-A/Mat/index.html | 2 - .../Make/argument-1-A/Scalar/index.html | 2 - .../Make/argument-1-A/index.html | 379 --------- owl/Owl_distribution/Make/index.html | 2 - owl/Owl_distribution_common/.dummy | 0 owl/Owl_distribution_generic/.dummy | 0 owl/Owl_fft/.dummy | 0 owl/Owl_fft/D/index.html | 19 - owl/Owl_fft/Generic/index.html | 21 - owl/Owl_fft/S/index.html | 19 - owl/Owl_fft_d/.dummy | 0 owl/Owl_fft_generic/.dummy | 0 owl/Owl_fft_s/.dummy | 0 owl/Owl_fftpack/.dummy | 0 owl/Owl_lapacke/.dummy | 0 owl/Owl_lapacke_generated/.dummy | 0 owl/Owl_linalg/.dummy | 0 owl/Owl_linalg/C/index.html | 25 - owl/Owl_linalg/D/index.html | 25 - owl/Owl_linalg/Generic/index.html | 81 -- owl/Owl_linalg/S/index.html | 25 - owl/Owl_linalg/Z/index.html | 25 - owl/Owl_linalg_c/.dummy | 0 owl/Owl_linalg_d/.dummy | 0 owl/Owl_linalg_generic/.dummy | 0 owl/Owl_linalg_intf/.dummy | 0 .../module-type-Common/index.html | 17 - .../module-type-Real/index.html | 2 - owl/Owl_linalg_s/.dummy | 0 owl/Owl_linalg_z/.dummy | 0 owl/Owl_maths/.dummy | 0 owl/Owl_maths_special/.dummy | 0 owl/Owl_matrix/.dummy | 0 owl/Owl_matrix_check/.dummy | 0 owl/Owl_matrix_swap/.dummy | 0 owl/Owl_ndarray/.dummy | 0 owl/Owl_ndarray_contract/.dummy | 0 owl/Owl_ndarray_conv/.dummy | 0 owl/Owl_ndarray_fma/.dummy | 0 owl/Owl_ndarray_maths/.dummy | 0 owl/Owl_ndarray_pool/.dummy | 0 owl/Owl_ndarray_repeat/.dummy | 0 owl/Owl_ndarray_slide/.dummy | 0 owl/Owl_ndarray_sort/.dummy | 0 owl/Owl_ndarray_transpose/.dummy | 0 owl/Owl_ndarray_upsampling/.dummy | 0 owl/Owl_ndarray_utils/.dummy | 0 owl/Owl_neural/.dummy | 0 .../D/Graph/Neuron/Activation/index.html | 6 - owl/Owl_neural/D/Graph/Neuron/Add/index.html | 4 - .../D/Graph/Neuron/AlphaDropout/index.html | 4 - .../D/Graph/Neuron/Average/index.html | 4 - .../D/Graph/Neuron/AvgPool1D/index.html | 4 - .../D/Graph/Neuron/AvgPool2D/index.html | 4 - .../D/Graph/Neuron/Concatenate/index.html | 4 - .../D/Graph/Neuron/Conv1D/index.html | 10 - .../D/Graph/Neuron/Conv2D/index.html | 10 - .../D/Graph/Neuron/Conv3D/index.html | 10 - .../D/Graph/Neuron/DilatedConv1D/index.html | 11 - .../D/Graph/Neuron/DilatedConv2D/index.html | 11 - .../D/Graph/Neuron/DilatedConv3D/index.html | 11 - owl/Owl_neural/D/Graph/Neuron/Dot/index.html | 4 - .../D/Graph/Neuron/Dropout/index.html | 4 - .../D/Graph/Neuron/Embedding/index.html | 4 - .../D/Graph/Neuron/Flatten/index.html | 4 - .../D/Graph/Neuron/FullyConnected/index.html | 4 - owl/Owl_neural/D/Graph/Neuron/GRU/index.html | 4 - .../D/Graph/Neuron/GaussianDropout/index.html | 4 - .../D/Graph/Neuron/GaussianNoise/index.html | 4 - 
.../D/Graph/Neuron/GlobalAvgPool1D/index.html | 4 - .../D/Graph/Neuron/GlobalAvgPool2D/index.html | 4 - .../D/Graph/Neuron/GlobalMaxPool1D/index.html | 4 - .../D/Graph/Neuron/GlobalMaxPool2D/index.html | 4 - owl/Owl_neural/D/Graph/Neuron/Init/index.html | 4 - .../D/Graph/Neuron/Input/index.html | 4 - owl/Owl_neural/D/Graph/Neuron/LSTM/index.html | 4 - .../D/Graph/Neuron/Lambda/index.html | 7 - .../D/Graph/Neuron/LambdaArray/index.html | 7 - .../D/Graph/Neuron/Linear/index.html | 4 - .../D/Graph/Neuron/LinearNoBias/index.html | 4 - .../D/Graph/Neuron/Masking/index.html | 2 - owl/Owl_neural/D/Graph/Neuron/Max/index.html | 4 - .../D/Graph/Neuron/MaxPool1D/index.html | 4 - .../D/Graph/Neuron/MaxPool2D/index.html | 4 - owl/Owl_neural/D/Graph/Neuron/Mul/index.html | 4 - .../D/Graph/Neuron/Normalisation/index.html | 10 - .../Optimise/Algodiff/A/Linalg/index.html | 6 - .../Neuron/Optimise/Algodiff/A/Mat/index.html | 2 - .../Optimise/Algodiff/A/Scalar/index.html | 2 - .../Neuron/Optimise/Algodiff/A/index.html | 160 ---- .../Neuron/Optimise/Algodiff/Arr/index.html | 2 - .../Optimise/Algodiff/Builder/index.html | 2 - .../Builder/module-type-Aiso/index.html | 2 - .../Builder/module-type-Piso/index.html | 2 - .../Builder/module-type-Siao/index.html | 2 - .../Builder/module-type-Sipo/index.html | 7 - .../Builder/module-type-Siso/index.html | 2 - .../Builder/module-type-Sito/index.html | 7 - .../Optimise/Algodiff/Linalg/index.html | 6 - .../Neuron/Optimise/Algodiff/Mat/index.html | 2 - .../Neuron/Optimise/Algodiff/Maths/index.html | 2 - .../Neuron/Optimise/Algodiff/NN/index.html | 20 - .../Graph/Neuron/Optimise/Algodiff/index.html | 4 - .../D/Graph/Neuron/Optimise/Batch/index.html | 4 - .../Neuron/Optimise/Checkpoint/index.html | 6 - .../Graph/Neuron/Optimise/Clipping/index.html | 4 - .../Graph/Neuron/Optimise/Gradient/index.html | 11 - .../Neuron/Optimise/Learning_Rate/index.html | 4 - .../D/Graph/Neuron/Optimise/Loss/index.html | 4 - .../Graph/Neuron/Optimise/Momentum/index.html | 4 - .../D/Graph/Neuron/Optimise/Params/index.html | 16 - .../Neuron/Optimise/Regularisation/index.html | 4 - .../Graph/Neuron/Optimise/Stopping/index.html | 4 - .../D/Graph/Neuron/Optimise/Utils/index.html | 7 - .../D/Graph/Neuron/Optimise/index.html | 31 - .../D/Graph/Neuron/Padding1D/index.html | 2 - .../D/Graph/Neuron/Padding2D/index.html | 4 - .../D/Graph/Neuron/Padding3D/index.html | 2 - .../D/Graph/Neuron/Recurrent/index.html | 11 - .../D/Graph/Neuron/Reshape/index.html | 4 - .../D/Graph/Neuron/Slice/index.html | 4 - .../D/Graph/Neuron/TransposeConv1D/index.html | 10 - .../D/Graph/Neuron/TransposeConv2D/index.html | 10 - .../D/Graph/Neuron/TransposeConv3D/index.html | 10 - .../D/Graph/Neuron/UpSampling1D/index.html | 2 - .../D/Graph/Neuron/UpSampling2D/index.html | 4 - .../D/Graph/Neuron/UpSampling3D/index.html | 2 - owl/Owl_neural/D/Graph/Neuron/index.html | 4 - owl/Owl_neural/D/Graph/index.html | 245 ------ owl/Owl_neural/D/index.html | 2 - .../S/Graph/Neuron/Activation/index.html | 6 - owl/Owl_neural/S/Graph/Neuron/Add/index.html | 4 - .../S/Graph/Neuron/AlphaDropout/index.html | 4 - .../S/Graph/Neuron/Average/index.html | 4 - .../S/Graph/Neuron/AvgPool1D/index.html | 4 - .../S/Graph/Neuron/AvgPool2D/index.html | 4 - .../S/Graph/Neuron/Concatenate/index.html | 4 - .../S/Graph/Neuron/Conv1D/index.html | 10 - .../S/Graph/Neuron/Conv2D/index.html | 10 - .../S/Graph/Neuron/Conv3D/index.html | 10 - .../S/Graph/Neuron/DilatedConv1D/index.html | 11 - .../S/Graph/Neuron/DilatedConv2D/index.html | 11 - 
.../S/Graph/Neuron/DilatedConv3D/index.html | 11 - owl/Owl_neural/S/Graph/Neuron/Dot/index.html | 4 - .../S/Graph/Neuron/Dropout/index.html | 4 - .../S/Graph/Neuron/Embedding/index.html | 4 - .../S/Graph/Neuron/Flatten/index.html | 4 - .../S/Graph/Neuron/FullyConnected/index.html | 4 - owl/Owl_neural/S/Graph/Neuron/GRU/index.html | 4 - .../S/Graph/Neuron/GaussianDropout/index.html | 4 - .../S/Graph/Neuron/GaussianNoise/index.html | 4 - .../S/Graph/Neuron/GlobalAvgPool1D/index.html | 4 - .../S/Graph/Neuron/GlobalAvgPool2D/index.html | 4 - .../S/Graph/Neuron/GlobalMaxPool1D/index.html | 4 - .../S/Graph/Neuron/GlobalMaxPool2D/index.html | 4 - owl/Owl_neural/S/Graph/Neuron/Init/index.html | 4 - .../S/Graph/Neuron/Input/index.html | 4 - owl/Owl_neural/S/Graph/Neuron/LSTM/index.html | 4 - .../S/Graph/Neuron/Lambda/index.html | 7 - .../S/Graph/Neuron/LambdaArray/index.html | 7 - .../S/Graph/Neuron/Linear/index.html | 4 - .../S/Graph/Neuron/LinearNoBias/index.html | 4 - .../S/Graph/Neuron/Masking/index.html | 2 - owl/Owl_neural/S/Graph/Neuron/Max/index.html | 4 - .../S/Graph/Neuron/MaxPool1D/index.html | 4 - .../S/Graph/Neuron/MaxPool2D/index.html | 4 - owl/Owl_neural/S/Graph/Neuron/Mul/index.html | 4 - .../S/Graph/Neuron/Normalisation/index.html | 10 - .../Optimise/Algodiff/A/Linalg/index.html | 6 - .../Neuron/Optimise/Algodiff/A/Mat/index.html | 2 - .../Optimise/Algodiff/A/Scalar/index.html | 2 - .../Neuron/Optimise/Algodiff/A/index.html | 160 ---- .../Neuron/Optimise/Algodiff/Arr/index.html | 2 - .../Optimise/Algodiff/Builder/index.html | 2 - .../Builder/module-type-Aiso/index.html | 2 - .../Builder/module-type-Piso/index.html | 2 - .../Builder/module-type-Siao/index.html | 2 - .../Builder/module-type-Sipo/index.html | 7 - .../Builder/module-type-Siso/index.html | 2 - .../Builder/module-type-Sito/index.html | 7 - .../Optimise/Algodiff/Linalg/index.html | 6 - .../Neuron/Optimise/Algodiff/Mat/index.html | 2 - .../Neuron/Optimise/Algodiff/Maths/index.html | 2 - .../Neuron/Optimise/Algodiff/NN/index.html | 20 - .../Graph/Neuron/Optimise/Algodiff/index.html | 4 - .../S/Graph/Neuron/Optimise/Batch/index.html | 4 - .../Neuron/Optimise/Checkpoint/index.html | 6 - .../Graph/Neuron/Optimise/Clipping/index.html | 4 - .../Graph/Neuron/Optimise/Gradient/index.html | 11 - .../Neuron/Optimise/Learning_Rate/index.html | 4 - .../S/Graph/Neuron/Optimise/Loss/index.html | 4 - .../Graph/Neuron/Optimise/Momentum/index.html | 4 - .../S/Graph/Neuron/Optimise/Params/index.html | 16 - .../Neuron/Optimise/Regularisation/index.html | 4 - .../Graph/Neuron/Optimise/Stopping/index.html | 4 - .../S/Graph/Neuron/Optimise/Utils/index.html | 7 - .../S/Graph/Neuron/Optimise/index.html | 31 - .../S/Graph/Neuron/Padding1D/index.html | 2 - .../S/Graph/Neuron/Padding2D/index.html | 4 - .../S/Graph/Neuron/Padding3D/index.html | 2 - .../S/Graph/Neuron/Recurrent/index.html | 11 - .../S/Graph/Neuron/Reshape/index.html | 4 - .../S/Graph/Neuron/Slice/index.html | 4 - .../S/Graph/Neuron/TransposeConv1D/index.html | 10 - .../S/Graph/Neuron/TransposeConv2D/index.html | 10 - .../S/Graph/Neuron/TransposeConv3D/index.html | 10 - .../S/Graph/Neuron/UpSampling1D/index.html | 2 - .../S/Graph/Neuron/UpSampling2D/index.html | 4 - .../S/Graph/Neuron/UpSampling3D/index.html | 2 - owl/Owl_neural/S/Graph/Neuron/index.html | 4 - owl/Owl_neural/S/Graph/index.html | 245 ------ owl/Owl_neural/S/index.html | 2 - owl/Owl_neural_parallel/.dummy | 0 .../Make/argument-1-M/index.html | 9 - .../Make/argument-2-E/index.html | 2 - 
owl/Owl_neural_parallel/Make/index.html | 22 - .../module-type-EngineSig/index.html | 2 - .../module-type-ModelSig/index.html | 9 - owl/Owl_nlp/.dummy | 0 owl/Owl_nlp_corpus/.dummy | 0 owl/Owl_nlp_lda/.dummy | 0 owl/Owl_nlp_similarity/.dummy | 0 owl/Owl_nlp_tfidf/.dummy | 0 owl/Owl_nlp_utils/.dummy | 0 owl/Owl_nlp_vocabulary/.dummy | 0 owl/Owl_optimise/.dummy | 0 .../D/Algodiff/A/Linalg/index.html | 6 - owl/Owl_optimise/D/Algodiff/A/Mat/index.html | 2 - .../D/Algodiff/A/Scalar/index.html | 2 - owl/Owl_optimise/D/Algodiff/A/index.html | 158 ---- owl/Owl_optimise/D/Algodiff/Arr/index.html | 2 - .../D/Algodiff/Builder/index.html | 2 - .../Builder/module-type-Aiso/index.html | 2 - .../Builder/module-type-Piso/index.html | 2 - .../Builder/module-type-Siao/index.html | 2 - .../Builder/module-type-Sipo/index.html | 7 - .../Builder/module-type-Siso/index.html | 2 - .../Builder/module-type-Sito/index.html | 7 - owl/Owl_optimise/D/Algodiff/Linalg/index.html | 6 - owl/Owl_optimise/D/Algodiff/Mat/index.html | 2 - owl/Owl_optimise/D/Algodiff/Maths/index.html | 2 - owl/Owl_optimise/D/Algodiff/NN/index.html | 20 - owl/Owl_optimise/D/Algodiff/index.html | 2 - owl/Owl_optimise/D/Batch/index.html | 4 - owl/Owl_optimise/D/Checkpoint/index.html | 6 - owl/Owl_optimise/D/Clipping/index.html | 4 - owl/Owl_optimise/D/Gradient/index.html | 11 - owl/Owl_optimise/D/Learning_Rate/index.html | 4 - owl/Owl_optimise/D/Loss/index.html | 4 - owl/Owl_optimise/D/Momentum/index.html | 4 - owl/Owl_optimise/D/Params/index.html | 16 - owl/Owl_optimise/D/Regularisation/index.html | 4 - owl/Owl_optimise/D/Stopping/index.html | 4 - owl/Owl_optimise/D/Utils/index.html | 7 - owl/Owl_optimise/D/index.html | 31 - .../Algodiff/A/Linalg/index.html | 6 - .../Make_Embedded/Algodiff/A/Mat/index.html | 2 - .../Algodiff/A/Scalar/index.html | 2 - .../Make_Embedded/Algodiff/A/index.html | 158 ---- .../Make_Embedded/Algodiff/Arr/index.html | 2 - .../Make_Embedded/Algodiff/Builder/index.html | 2 - .../Builder/module-type-Aiso/index.html | 2 - .../Builder/module-type-Piso/index.html | 2 - .../Builder/module-type-Siao/index.html | 2 - .../Builder/module-type-Sipo/index.html | 7 - .../Builder/module-type-Siso/index.html | 2 - .../Builder/module-type-Sito/index.html | 7 - .../Make_Embedded/Algodiff/Linalg/index.html | 6 - .../Make_Embedded/Algodiff/Mat/index.html | 2 - .../Make_Embedded/Algodiff/Maths/index.html | 2 - .../Make_Embedded/Algodiff/NN/index.html | 20 - .../Make_Embedded/Algodiff/index.html | 2 - .../Make_Embedded/Batch/index.html | 2 - .../Make_Embedded/Checkpoint/index.html | 6 - .../Make_Embedded/Clipping/index.html | 3 - .../Make_Embedded/Gradient/index.html | 10 - .../Make_Embedded/Learning_Rate/index.html | 4 - .../Make_Embedded/Loss/index.html | 2 - .../Make_Embedded/Momentum/index.html | 3 - .../Make_Embedded/Params/index.html | 14 - .../Make_Embedded/Regularisation/index.html | 4 - .../Make_Embedded/Stopping/index.html | 3 - .../Make_Embedded/Utils/index.html | 7 - .../argument-1-A/Linalg/index.html | 6 - .../Make_Embedded/argument-1-A/Mat/index.html | 2 - .../argument-1-A/Scalar/index.html | 2 - .../Make_Embedded/argument-1-A/index.html | 158 ---- owl/Owl_optimise/Make_Embedded/index.html | 31 - .../S/Algodiff/A/Linalg/index.html | 6 - owl/Owl_optimise/S/Algodiff/A/Mat/index.html | 2 - .../S/Algodiff/A/Scalar/index.html | 2 - owl/Owl_optimise/S/Algodiff/A/index.html | 158 ---- owl/Owl_optimise/S/Algodiff/Arr/index.html | 2 - .../S/Algodiff/Builder/index.html | 2 - .../Builder/module-type-Aiso/index.html | 2 - 
.../Builder/module-type-Piso/index.html | 2 - .../Builder/module-type-Siao/index.html | 2 - .../Builder/module-type-Sipo/index.html | 7 - .../Builder/module-type-Siso/index.html | 2 - .../Builder/module-type-Sito/index.html | 7 - owl/Owl_optimise/S/Algodiff/Linalg/index.html | 6 - owl/Owl_optimise/S/Algodiff/Mat/index.html | 2 - owl/Owl_optimise/S/Algodiff/Maths/index.html | 2 - owl/Owl_optimise/S/Algodiff/NN/index.html | 20 - owl/Owl_optimise/S/Algodiff/index.html | 2 - owl/Owl_optimise/S/Batch/index.html | 4 - owl/Owl_optimise/S/Checkpoint/index.html | 6 - owl/Owl_optimise/S/Clipping/index.html | 4 - owl/Owl_optimise/S/Gradient/index.html | 11 - owl/Owl_optimise/S/Learning_Rate/index.html | 4 - owl/Owl_optimise/S/Loss/index.html | 4 - owl/Owl_optimise/S/Momentum/index.html | 4 - owl/Owl_optimise/S/Params/index.html | 16 - owl/Owl_optimise/S/Regularisation/index.html | 4 - owl/Owl_optimise/S/Stopping/index.html | 4 - owl/Owl_optimise/S/Utils/index.html | 7 - owl/Owl_optimise/S/index.html | 31 - owl/Owl_regression/.dummy | 0 .../D/Optimise/Algodiff/A/Linalg/index.html | 6 - .../D/Optimise/Algodiff/A/Mat/index.html | 2 - .../D/Optimise/Algodiff/A/Scalar/index.html | 2 - .../D/Optimise/Algodiff/A/index.html | 160 ---- .../D/Optimise/Algodiff/Arr/index.html | 2 - .../D/Optimise/Algodiff/Builder/index.html | 2 - .../Builder/module-type-Aiso/index.html | 2 - .../Builder/module-type-Piso/index.html | 2 - .../Builder/module-type-Siao/index.html | 2 - .../Builder/module-type-Sipo/index.html | 7 - .../Builder/module-type-Siso/index.html | 2 - .../Builder/module-type-Sito/index.html | 7 - .../D/Optimise/Algodiff/Linalg/index.html | 6 - .../D/Optimise/Algodiff/Mat/index.html | 2 - .../D/Optimise/Algodiff/Maths/index.html | 2 - .../D/Optimise/Algodiff/NN/index.html | 20 - .../D/Optimise/Algodiff/index.html | 4 - .../D/Optimise/Batch/index.html | 4 - .../D/Optimise/Checkpoint/index.html | 6 - .../D/Optimise/Clipping/index.html | 4 - .../D/Optimise/Gradient/index.html | 11 - .../D/Optimise/Learning_Rate/index.html | 4 - owl/Owl_regression/D/Optimise/Loss/index.html | 4 - .../D/Optimise/Momentum/index.html | 4 - .../D/Optimise/Params/index.html | 16 - .../D/Optimise/Regularisation/index.html | 4 - .../D/Optimise/Stopping/index.html | 4 - .../D/Optimise/Utils/index.html | 7 - owl/Owl_regression/D/Optimise/index.html | 31 - owl/Owl_regression/D/index.html | 44 - .../Optimise/Algodiff/A/Linalg/index.html | 6 - .../Optimise/Algodiff/A/Mat/index.html | 2 - .../Optimise/Algodiff/A/Scalar/index.html | 2 - .../Optimise/Algodiff/A/index.html | 160 ---- .../Optimise/Algodiff/Arr/index.html | 2 - .../Optimise/Algodiff/Builder/index.html | 2 - .../Builder/module-type-Aiso/index.html | 2 - .../Builder/module-type-Piso/index.html | 2 - .../Builder/module-type-Siao/index.html | 2 - .../Builder/module-type-Sipo/index.html | 7 - .../Builder/module-type-Siso/index.html | 2 - .../Builder/module-type-Sito/index.html | 7 - .../Optimise/Algodiff/Linalg/index.html | 6 - .../Optimise/Algodiff/Mat/index.html | 2 - .../Optimise/Algodiff/Maths/index.html | 2 - .../Optimise/Algodiff/NN/index.html | 20 - .../Optimise/Algodiff/index.html | 2 - .../Make_Embedded/Optimise/Batch/index.html | 2 - .../Optimise/Checkpoint/index.html | 6 - .../Optimise/Clipping/index.html | 3 - .../Optimise/Gradient/index.html | 10 - .../Optimise/Learning_Rate/index.html | 4 - .../Make_Embedded/Optimise/Loss/index.html | 2 - .../Optimise/Momentum/index.html | 3 - .../Make_Embedded/Optimise/Params/index.html | 14 - .../Optimise/Regularisation/index.html | 4 
- .../Optimise/Stopping/index.html | 3 - .../Make_Embedded/Optimise/Utils/index.html | 7 - .../Make_Embedded/Optimise/index.html | 31 - .../argument-1-A/Linalg/index.html | 6 - .../Make_Embedded/argument-1-A/Mat/index.html | 2 - .../argument-1-A/Scalar/index.html | 2 - .../Make_Embedded/argument-1-A/index.html | 158 ---- owl/Owl_regression/Make_Embedded/index.html | 44 - .../S/Optimise/Algodiff/A/Linalg/index.html | 6 - .../S/Optimise/Algodiff/A/Mat/index.html | 2 - .../S/Optimise/Algodiff/A/Scalar/index.html | 2 - .../S/Optimise/Algodiff/A/index.html | 160 ---- .../S/Optimise/Algodiff/Arr/index.html | 2 - .../S/Optimise/Algodiff/Builder/index.html | 2 - .../Builder/module-type-Aiso/index.html | 2 - .../Builder/module-type-Piso/index.html | 2 - .../Builder/module-type-Siao/index.html | 2 - .../Builder/module-type-Sipo/index.html | 7 - .../Builder/module-type-Siso/index.html | 2 - .../Builder/module-type-Sito/index.html | 7 - .../S/Optimise/Algodiff/Linalg/index.html | 6 - .../S/Optimise/Algodiff/Mat/index.html | 2 - .../S/Optimise/Algodiff/Maths/index.html | 2 - .../S/Optimise/Algodiff/NN/index.html | 20 - .../S/Optimise/Algodiff/index.html | 4 - .../S/Optimise/Batch/index.html | 4 - .../S/Optimise/Checkpoint/index.html | 6 - .../S/Optimise/Clipping/index.html | 4 - .../S/Optimise/Gradient/index.html | 11 - .../S/Optimise/Learning_Rate/index.html | 4 - owl/Owl_regression/S/Optimise/Loss/index.html | 4 - .../S/Optimise/Momentum/index.html | 4 - .../S/Optimise/Params/index.html | 16 - .../S/Optimise/Regularisation/index.html | 4 - .../S/Optimise/Stopping/index.html | 4 - .../S/Optimise/Utils/index.html | 7 - owl/Owl_regression/S/Optimise/index.html | 31 - owl/Owl_regression/S/index.html | 44 - owl/Owl_regression_generic/.dummy | 0 .../Algodiff/A/Linalg/index.html | 6 - .../Algodiff/A/Mat/index.html | 2 - .../Algodiff/A/Scalar/index.html | 2 - .../argument-1-Optimise/Algodiff/A/index.html | 158 ---- .../Algodiff/Arr/index.html | 2 - .../Algodiff/Builder/index.html | 2 - .../Builder/module-type-Aiso/index.html | 2 - .../Builder/module-type-Piso/index.html | 2 - .../Builder/module-type-Siao/index.html | 2 - .../Builder/module-type-Sipo/index.html | 7 - .../Builder/module-type-Siso/index.html | 2 - .../Builder/module-type-Sito/index.html | 7 - .../Algodiff/Linalg/index.html | 6 - .../Algodiff/Mat/index.html | 2 - .../Algodiff/Maths/index.html | 2 - .../Algodiff/NN/index.html | 20 - .../argument-1-Optimise/Algodiff/index.html | 11 - .../Make/argument-1-Optimise/Batch/index.html | 2 - .../argument-1-Optimise/Checkpoint/index.html | 2 - .../argument-1-Optimise/Clipping/index.html | 2 - .../argument-1-Optimise/Gradient/index.html | 9 - .../Learning_Rate/index.html | 2 - .../Make/argument-1-Optimise/Loss/index.html | 2 - .../argument-1-Optimise/Momentum/index.html | 2 - .../argument-1-Optimise/Params/index.html | 14 - .../Regularisation/index.html | 2 - .../argument-1-Optimise/Stopping/index.html | 2 - .../Make/argument-1-Optimise/Utils/index.html | 7 - .../Make/argument-1-Optimise/index.html | 31 - owl/Owl_regression_generic/Make/index.html | 44 - owl/Owl_regression_generic_sig/.dummy | 0 .../Optimise/Algodiff/A/Linalg/index.html | 6 - .../Optimise/Algodiff/A/Mat/index.html | 2 - .../Optimise/Algodiff/A/Scalar/index.html | 2 - .../Optimise/Algodiff/A/index.html | 158 ---- .../Optimise/Algodiff/Arr/index.html | 2 - .../Optimise/Algodiff/Builder/index.html | 2 - .../Builder/module-type-Aiso/index.html | 2 - .../Builder/module-type-Piso/index.html | 2 - .../Builder/module-type-Siao/index.html | 2 - 
.../Builder/module-type-Sipo/index.html | 7 - .../Builder/module-type-Siso/index.html | 2 - .../Builder/module-type-Sito/index.html | 7 - .../Optimise/Algodiff/Linalg/index.html | 6 - .../Optimise/Algodiff/Mat/index.html | 2 - .../Optimise/Algodiff/Maths/index.html | 2 - .../Optimise/Algodiff/NN/index.html | 20 - .../Optimise/Algodiff/index.html | 11 - .../module-type-Sig/Optimise/Batch/index.html | 2 - .../Optimise/Checkpoint/index.html | 2 - .../Optimise/Clipping/index.html | 2 - .../Optimise/Gradient/index.html | 9 - .../Optimise/Learning_Rate/index.html | 2 - .../module-type-Sig/Optimise/Loss/index.html | 2 - .../Optimise/Momentum/index.html | 2 - .../Optimise/Params/index.html | 14 - .../Optimise/Regularisation/index.html | 2 - .../Optimise/Stopping/index.html | 2 - .../module-type-Sig/Optimise/Utils/index.html | 7 - .../module-type-Sig/Optimise/index.html | 31 - .../module-type-Sig/index.html | 8 - owl/Owl_signal/.dummy | 0 owl/Owl_slicing/.dummy | 0 owl/Owl_slicing_basic/.dummy | 0 owl/Owl_slicing_fancy/.dummy | 0 owl/Owl_stats/.dummy | 0 owl/Owl_stats_dist/.dummy | 0 owl/Owl_stats_extend/.dummy | 0 owl/Owl_stats_prng/.dummy | 0 owl/Owl_stats_sampler/.dummy | 0 2055 files changed, 55065 deletions(-) delete mode 100644 fonts/KaTeX_AMS-Regular.woff2 delete mode 100644 fonts/KaTeX_Caligraphic-Bold.woff2 delete mode 100644 fonts/KaTeX_Caligraphic-Regular.woff2 delete mode 100644 fonts/KaTeX_Fraktur-Bold.woff2 delete mode 100644 fonts/KaTeX_Fraktur-Regular.woff2 delete mode 100644 fonts/KaTeX_Main-Bold.woff2 delete mode 100644 fonts/KaTeX_Main-BoldItalic.woff2 delete mode 100644 fonts/KaTeX_Main-Italic.woff2 delete mode 100644 fonts/KaTeX_Main-Regular.woff2 delete mode 100644 fonts/KaTeX_Math-BoldItalic.woff2 delete mode 100644 fonts/KaTeX_Math-Italic.woff2 delete mode 100644 fonts/KaTeX_SansSerif-Bold.woff2 delete mode 100644 fonts/KaTeX_SansSerif-Italic.woff2 delete mode 100644 fonts/KaTeX_SansSerif-Regular.woff2 delete mode 100644 fonts/KaTeX_Script-Regular.woff2 delete mode 100644 fonts/KaTeX_Size1-Regular.woff2 delete mode 100644 fonts/KaTeX_Size2-Regular.woff2 delete mode 100644 fonts/KaTeX_Size3-Regular.woff2 delete mode 100644 fonts/KaTeX_Size4-Regular.woff2 delete mode 100644 fonts/KaTeX_Typewriter-Regular.woff2 delete mode 100644 katex.min.css delete mode 100644 katex.min.js delete mode 100644 mathjax.js delete mode 100644 odoc.css delete mode 100644 owl-base/Owl_algodiff_check/.dummy delete mode 100644 owl-base/Owl_algodiff_check/Make/Forward/index.html delete mode 100644 owl-base/Owl_algodiff_check/Make/Reverse/index.html delete mode 100644 owl-base/Owl_algodiff_check/Make/argument-1-AD/A/Linalg/index.html delete mode 100644 owl-base/Owl_algodiff_check/Make/argument-1-AD/A/Mat/index.html delete mode 100644 owl-base/Owl_algodiff_check/Make/argument-1-AD/A/Scalar/index.html delete mode 100644 owl-base/Owl_algodiff_check/Make/argument-1-AD/A/index.html delete mode 100644 owl-base/Owl_algodiff_check/Make/argument-1-AD/Arr/index.html delete mode 100644 owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/index.html delete mode 100644 owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Aiso/index.html delete mode 100644 owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Piso/index.html delete mode 100644 owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Siao/index.html delete mode 100644 owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Sipo/index.html delete mode 100644 
owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Siso/index.html delete mode 100644 owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Sito/index.html delete mode 100644 owl-base/Owl_algodiff_check/Make/argument-1-AD/Linalg/index.html delete mode 100644 owl-base/Owl_algodiff_check/Make/argument-1-AD/Mat/index.html delete mode 100644 owl-base/Owl_algodiff_check/Make/argument-1-AD/Maths/index.html delete mode 100644 owl-base/Owl_algodiff_check/Make/argument-1-AD/NN/index.html delete mode 100644 owl-base/Owl_algodiff_check/Make/argument-1-AD/index.html delete mode 100644 owl-base/Owl_algodiff_check/Make/index.html delete mode 100644 owl-base/Owl_algodiff_core/.dummy delete mode 100644 owl-base/Owl_algodiff_core/Make/A/Linalg/index.html delete mode 100644 owl-base/Owl_algodiff_core/Make/A/Mat/index.html delete mode 100644 owl-base/Owl_algodiff_core/Make/A/Scalar/index.html delete mode 100644 owl-base/Owl_algodiff_core/Make/A/index.html delete mode 100644 owl-base/Owl_algodiff_core/Make/argument-1-A/Linalg/index.html delete mode 100644 owl-base/Owl_algodiff_core/Make/argument-1-A/Mat/index.html delete mode 100644 owl-base/Owl_algodiff_core/Make/argument-1-A/Scalar/index.html delete mode 100644 owl-base/Owl_algodiff_core/Make/argument-1-A/index.html delete mode 100644 owl-base/Owl_algodiff_core/Make/index.html delete mode 100644 owl-base/Owl_algodiff_core_sig/.dummy delete mode 100644 owl-base/Owl_algodiff_core_sig/module-type-Sig/A/Linalg/index.html delete mode 100644 owl-base/Owl_algodiff_core_sig/module-type-Sig/A/Mat/index.html delete mode 100644 owl-base/Owl_algodiff_core_sig/module-type-Sig/A/Scalar/index.html delete mode 100644 owl-base/Owl_algodiff_core_sig/module-type-Sig/A/index.html delete mode 100644 owl-base/Owl_algodiff_core_sig/module-type-Sig/index.html delete mode 100644 owl-base/Owl_algodiff_generic/.dummy delete mode 100644 owl-base/Owl_algodiff_generic/Make/A/Linalg/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/A/Mat/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/A/Scalar/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/A/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/Arr/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/Builder/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/Builder/module-type-Aiso/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/Builder/module-type-Piso/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/Builder/module-type-Siao/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/Builder/module-type-Sipo/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/Builder/module-type-Siso/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/Builder/module-type-Sito/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/Linalg/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/Mat/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/Maths/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/NN/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/argument-1-A/Linalg/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/argument-1-A/Mat/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/argument-1-A/Scalar/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/argument-1-A/index.html delete mode 100644 owl-base/Owl_algodiff_generic/Make/index.html delete mode 100644 
owl-base/Owl_algodiff_generic_sig/.dummy delete mode 100644 owl-base/Owl_algodiff_generic_sig/module-type-Sig/A/Linalg/index.html delete mode 100644 owl-base/Owl_algodiff_generic_sig/module-type-Sig/A/Mat/index.html delete mode 100644 owl-base/Owl_algodiff_generic_sig/module-type-Sig/A/Scalar/index.html delete mode 100644 owl-base/Owl_algodiff_generic_sig/module-type-Sig/A/index.html delete mode 100644 owl-base/Owl_algodiff_generic_sig/module-type-Sig/Arr/index.html delete mode 100644 owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/index.html delete mode 100644 owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Aiso/index.html delete mode 100644 owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Piso/index.html delete mode 100644 owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Siao/index.html delete mode 100644 owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Sipo/index.html delete mode 100644 owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Siso/index.html delete mode 100644 owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Sito/index.html delete mode 100644 owl-base/Owl_algodiff_generic_sig/module-type-Sig/Linalg/index.html delete mode 100644 owl-base/Owl_algodiff_generic_sig/module-type-Sig/Mat/index.html delete mode 100644 owl-base/Owl_algodiff_generic_sig/module-type-Sig/Maths/index.html delete mode 100644 owl-base/Owl_algodiff_generic_sig/module-type-Sig/NN/index.html delete mode 100644 owl-base/Owl_algodiff_generic_sig/module-type-Sig/index.html delete mode 100644 owl-base/Owl_algodiff_graph_convert/.dummy delete mode 100644 owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/A/Linalg/index.html delete mode 100644 owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/A/Mat/index.html delete mode 100644 owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/A/Scalar/index.html delete mode 100644 owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/A/index.html delete mode 100644 owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/index.html delete mode 100644 owl-base/Owl_algodiff_graph_convert/Make/index.html delete mode 100644 owl-base/Owl_algodiff_graph_convert_sig/.dummy delete mode 100644 owl-base/Owl_algodiff_graph_convert_sig/module-type-Sig/index.html delete mode 100644 owl-base/Owl_algodiff_ops/.dummy delete mode 100644 owl-base/Owl_algodiff_ops/Make/Arr/index.html delete mode 100644 owl-base/Owl_algodiff_ops/Make/Builder/index.html delete mode 100644 owl-base/Owl_algodiff_ops/Make/Builder/module-type-Aiso/index.html delete mode 100644 owl-base/Owl_algodiff_ops/Make/Builder/module-type-Piso/index.html delete mode 100644 owl-base/Owl_algodiff_ops/Make/Builder/module-type-Siao/index.html delete mode 100644 owl-base/Owl_algodiff_ops/Make/Builder/module-type-Sipo/index.html delete mode 100644 owl-base/Owl_algodiff_ops/Make/Builder/module-type-Siso/index.html delete mode 100644 owl-base/Owl_algodiff_ops/Make/Builder/module-type-Sito/index.html delete mode 100644 owl-base/Owl_algodiff_ops/Make/Linalg/index.html delete mode 100644 owl-base/Owl_algodiff_ops/Make/Mat/index.html delete mode 100644 owl-base/Owl_algodiff_ops/Make/Maths/index.html delete mode 100644 owl-base/Owl_algodiff_ops/Make/NN/index.html delete mode 100644 owl-base/Owl_algodiff_ops/Make/argument-1-Core/A/Linalg/index.html delete mode 100644 owl-base/Owl_algodiff_ops/Make/argument-1-Core/A/Mat/index.html delete mode 100644 
owl-base/Owl_algodiff_ops/Make/argument-1-Core/A/Scalar/index.html delete mode 100644 owl-base/Owl_algodiff_ops/Make/argument-1-Core/A/index.html delete mode 100644 owl-base/Owl_algodiff_ops/Make/argument-1-Core/index.html delete mode 100644 owl-base/Owl_algodiff_ops/Make/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder/.dummy delete mode 100644 owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/A/Linalg/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/A/Mat/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/A/Scalar/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/A/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder/Make/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder/Make/module-type-Aiso/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder/Make/module-type-Piso/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder/Make/module-type-Siao/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder/Make/module-type-Sipo/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder/Make/module-type-Siso/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder/Make/module-type-Sito/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder_sig/.dummy delete mode 100644 owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Aiso/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Piso/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Siao/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Sipo/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Siso/index.html delete mode 100644 owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Sito/index.html delete mode 100644 owl-base/Owl_algodiff_ops_sig/.dummy delete mode 100644 owl-base/Owl_algodiff_ops_sig/module-type-Sig/Arr/index.html delete mode 100644 owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/index.html delete mode 100644 owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Aiso/index.html delete mode 100644 owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Piso/index.html delete mode 100644 owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Siao/index.html delete mode 100644 owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Sipo/index.html delete mode 100644 owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Siso/index.html delete mode 100644 owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Sito/index.html delete mode 100644 owl-base/Owl_algodiff_ops_sig/module-type-Sig/Linalg/index.html delete mode 100644 owl-base/Owl_algodiff_ops_sig/module-type-Sig/Mat/index.html delete mode 100644 owl-base/Owl_algodiff_ops_sig/module-type-Sig/Maths/index.html delete mode 100644 owl-base/Owl_algodiff_ops_sig/module-type-Sig/NN/index.html delete mode 100644 owl-base/Owl_algodiff_ops_sig/module-type-Sig/index.html delete mode 100644 owl-base/Owl_algodiff_reverse/.dummy delete mode 100644 owl-base/Owl_algodiff_reverse/Make/argument-1-C/A/Linalg/index.html delete mode 100644 
owl-base/Owl_algodiff_reverse/Make/argument-1-C/A/Mat/index.html delete mode 100644 owl-base/Owl_algodiff_reverse/Make/argument-1-C/A/Scalar/index.html delete mode 100644 owl-base/Owl_algodiff_reverse/Make/argument-1-C/A/index.html delete mode 100644 owl-base/Owl_algodiff_reverse/Make/argument-1-C/index.html delete mode 100644 owl-base/Owl_algodiff_reverse/Make/index.html delete mode 100644 owl-base/Owl_algodiff_types/.dummy delete mode 100644 owl-base/Owl_algodiff_types/Make/argument-1-A/Linalg/index.html delete mode 100644 owl-base/Owl_algodiff_types/Make/argument-1-A/Mat/index.html delete mode 100644 owl-base/Owl_algodiff_types/Make/argument-1-A/Scalar/index.html delete mode 100644 owl-base/Owl_algodiff_types/Make/argument-1-A/index.html delete mode 100644 owl-base/Owl_algodiff_types/Make/index.html delete mode 100644 owl-base/Owl_algodiff_types_sig/.dummy delete mode 100644 owl-base/Owl_algodiff_types_sig/module-type-Sig/index.html delete mode 100644 owl-base/Owl_base/.dummy delete mode 100644 owl-base/Owl_base_algodiff_primal_ops/.dummy delete mode 100644 owl-base/Owl_base_algodiff_primal_ops/D/Linalg/index.html delete mode 100644 owl-base/Owl_base_algodiff_primal_ops/D/Mat/index.html delete mode 100644 owl-base/Owl_base_algodiff_primal_ops/D/index.html delete mode 100644 owl-base/Owl_base_algodiff_primal_ops/S/Linalg/index.html delete mode 100644 owl-base/Owl_base_algodiff_primal_ops/S/Mat/index.html delete mode 100644 owl-base/Owl_base_algodiff_primal_ops/S/index.html delete mode 100644 owl-base/Owl_base_complex/.dummy delete mode 100644 owl-base/Owl_base_dense_common/.dummy delete mode 100644 owl-base/Owl_base_dense_matrix_c/.dummy delete mode 100644 owl-base/Owl_base_dense_matrix_d/.dummy delete mode 100644 owl-base/Owl_base_dense_matrix_generic/.dummy delete mode 100644 owl-base/Owl_base_dense_matrix_intf/.dummy delete mode 100644 owl-base/Owl_base_dense_matrix_intf/module-type-Common/index.html delete mode 100644 owl-base/Owl_base_dense_matrix_s/.dummy delete mode 100644 owl-base/Owl_base_dense_matrix_z/.dummy delete mode 100644 owl-base/Owl_base_dense_ndarray/.dummy delete mode 100644 owl-base/Owl_base_dense_ndarray/C/index.html delete mode 100644 owl-base/Owl_base_dense_ndarray/D/index.html delete mode 100644 owl-base/Owl_base_dense_ndarray/Generic/index.html delete mode 100644 owl-base/Owl_base_dense_ndarray/Operator/index.html delete mode 100644 owl-base/Owl_base_dense_ndarray/S/index.html delete mode 100644 owl-base/Owl_base_dense_ndarray/Z/index.html delete mode 100644 owl-base/Owl_base_dense_ndarray_c/.dummy delete mode 100644 owl-base/Owl_base_dense_ndarray_d/.dummy delete mode 100644 owl-base/Owl_base_dense_ndarray_generic/.dummy delete mode 100644 owl-base/Owl_base_dense_ndarray_intf/.dummy delete mode 100644 owl-base/Owl_base_dense_ndarray_intf/module-type-Common/index.html delete mode 100644 owl-base/Owl_base_dense_ndarray_intf/module-type-NN/index.html delete mode 100644 owl-base/Owl_base_dense_ndarray_intf/module-type-Real/index.html delete mode 100644 owl-base/Owl_base_dense_ndarray_s/.dummy delete mode 100644 owl-base/Owl_base_dense_ndarray_z/.dummy delete mode 100644 owl-base/Owl_base_linalg_c/.dummy delete mode 100644 owl-base/Owl_base_linalg_d/.dummy delete mode 100644 owl-base/Owl_base_linalg_generic/.dummy delete mode 100644 owl-base/Owl_base_linalg_intf/.dummy delete mode 100644 owl-base/Owl_base_linalg_intf/module-type-Common/index.html delete mode 100644 owl-base/Owl_base_linalg_intf/module-type-Real/index.html delete mode 100644 
owl-base/Owl_base_linalg_s/.dummy delete mode 100644 owl-base/Owl_base_linalg_z/.dummy delete mode 100644 owl-base/Owl_base_maths/.dummy delete mode 100644 owl-base/Owl_base_slicing/.dummy delete mode 100644 owl-base/Owl_base_stats/.dummy delete mode 100644 owl-base/Owl_base_stats_dist_bernoulli/.dummy delete mode 100644 owl-base/Owl_base_stats_dist_cauchy/.dummy delete mode 100644 owl-base/Owl_base_stats_dist_exponential/.dummy delete mode 100644 owl-base/Owl_base_stats_dist_gamma/.dummy delete mode 100644 owl-base/Owl_base_stats_dist_gaussian/.dummy delete mode 100644 owl-base/Owl_base_stats_dist_gumbel1/.dummy delete mode 100644 owl-base/Owl_base_stats_dist_gumbel2/.dummy delete mode 100644 owl-base/Owl_base_stats_dist_uniform/.dummy delete mode 100644 owl-base/Owl_base_stats_prng/.dummy delete mode 100644 owl-base/Owl_computation/.dummy delete mode 100644 owl-base/Owl_computation_cpu_device/.dummy delete mode 100644 owl-base/Owl_computation_cpu_device/Make/argument-1-A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_cpu_device/Make/argument-1-A/Mat/index.html delete mode 100644 owl-base/Owl_computation_cpu_device/Make/argument-1-A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_cpu_device/Make/argument-1-A/index.html delete mode 100644 owl-base/Owl_computation_cpu_device/Make/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/.dummy delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Linalg/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Mat/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Scalar/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/Graph/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/argument-1-A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/argument-1-A/Mat/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/argument-1-A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/argument-1-A/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make_Nested/CG_Eval/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make_Nested/CG_Init/MultiMap/index.html delete mode 100644 
owl-base/Owl_computation_cpu_engine/Make_Nested/CG_Init/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Linalg/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Mat/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Scalar/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/index.html delete mode 100644 owl-base/Owl_computation_cpu_engine/Make_Nested/index.html delete mode 100644 owl-base/Owl_computation_cpu_eval/.dummy delete mode 100644 owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Linalg/index.html delete mode 100644 owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Mat/index.html delete mode 100644 owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Scalar/index.html delete mode 100644 owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/index.html delete mode 100644 owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/index.html delete mode 100644 owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/index.html delete mode 
100644 owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/index.html delete mode 100644 owl-base/Owl_computation_cpu_eval/Make/index.html delete mode 100644 owl-base/Owl_computation_cpu_init/.dummy delete mode 100644 owl-base/Owl_computation_cpu_init/Make/MultiMap/index.html delete mode 100644 owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/Optimiser/Operator/Linalg/index.html delete mode 100644 owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/Optimiser/Operator/Mat/index.html delete mode 100644 owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/Optimiser/Operator/Scalar/index.html delete mode 100644 owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/Optimiser/Operator/Symbol/index.html delete mode 100644 owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/Optimiser/Operator/index.html delete mode 100644 owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/Optimiser/index.html delete mode 100644 owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/index.html delete mode 100644 owl-base/Owl_computation_cpu_init/Make/index.html delete mode 100644 owl-base/Owl_computation_engine/.dummy delete mode 100644 owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Linalg/index.html delete mode 100644 owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Mat/index.html delete mode 100644 owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Scalar/index.html delete mode 100644 owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/index.html delete mode 100644 
owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/index.html delete mode 100644 owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/index.html delete mode 100644 owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/index.html delete mode 100644 owl-base/Owl_computation_engine/Flatten/argument-1-Engine/index.html delete mode 100644 owl-base/Owl_computation_engine/Flatten/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Linalg/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Mat/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Scalar/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/Optimiser/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/A/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/index.html delete mode 100644 owl-base/Owl_computation_engine/Make_Graph/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/.dummy delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/A/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/A/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Linalg/index.html delete mode 100644 
owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Mat/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Scalar/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Linalg/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Mat/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Linalg/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Mat/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Scalar/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Linalg/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Mat/index.html delete mode 100644 
owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Scalar/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Scalar/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/A/Scalar/index.html delete mode 100644 
owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Linalg/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Mat/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Scalar/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/index.html delete mode 100644 owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/index.html delete mode 100644 owl-base/Owl_computation_graph/.dummy delete mode 100644 owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Linalg/index.html delete mode 100644 owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Mat/index.html delete mode 100644 owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Scalar/index.html delete mode 100644 owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/index.html delete mode 100644 owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/index.html delete mode 100644 
owl-base/Owl_computation_graph/Make/argument-1-Optimiser/index.html delete mode 100644 owl-base/Owl_computation_graph/Make/index.html delete mode 100644 owl-base/Owl_computation_graph_sig/.dummy delete mode 100644 owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Linalg/index.html delete mode 100644 owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Mat/index.html delete mode 100644 owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Scalar/index.html delete mode 100644 owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/index.html delete mode 100644 owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/index.html delete mode 100644 owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/index.html delete mode 100644 owl-base/Owl_computation_graph_sig/module-type-Sig/index.html delete mode 100644 owl-base/Owl_computation_operator/.dummy delete mode 100644 owl-base/Owl_computation_operator/Make/Linalg/index.html delete mode 100644 owl-base/Owl_computation_operator/Make/Mat/index.html delete mode 100644 owl-base/Owl_computation_operator/Make/Scalar/index.html delete mode 100644 owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/index.html delete mode 100644 owl-base/Owl_computation_operator/Make/argument-1-Symbol/index.html delete mode 100644 owl-base/Owl_computation_operator/Make/index.html delete mode 100644 owl-base/Owl_computation_operator_sig/.dummy delete mode 100644 owl-base/Owl_computation_operator_sig/module-type-Sig/Linalg/index.html delete mode 100644 owl-base/Owl_computation_operator_sig/module-type-Sig/Mat/index.html delete mode 100644 owl-base/Owl_computation_operator_sig/module-type-Sig/Scalar/index.html delete mode 100644 owl-base/Owl_computation_operator_sig/module-type-Sig/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_operator_sig/module-type-Sig/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 
owl-base/Owl_computation_operator_sig/module-type-Sig/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_operator_sig/module-type-Sig/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_operator_sig/module-type-Sig/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_operator_sig/module-type-Sig/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_operator_sig/module-type-Sig/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_computation_operator_sig/module-type-Sig/Symbol/index.html delete mode 100644 owl-base/Owl_computation_operator_sig/module-type-Sig/index.html delete mode 100644 owl-base/Owl_computation_optimiser/.dummy delete mode 100644 owl-base/Owl_computation_optimiser/Make/argument-1-Operator/Linalg/index.html delete mode 100644 owl-base/Owl_computation_optimiser/Make/argument-1-Operator/Mat/index.html delete mode 100644 owl-base/Owl_computation_optimiser/Make/argument-1-Operator/Scalar/index.html delete mode 100644 owl-base/Owl_computation_optimiser/Make/argument-1-Operator/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_optimiser/Make/argument-1-Operator/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_optimiser/Make/argument-1-Operator/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_optimiser/Make/argument-1-Operator/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_optimiser/Make/argument-1-Operator/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_optimiser/Make/argument-1-Operator/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_optimiser/Make/argument-1-Operator/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_computation_optimiser/Make/argument-1-Operator/Symbol/index.html delete mode 100644 owl-base/Owl_computation_optimiser/Make/argument-1-Operator/index.html delete mode 100644 owl-base/Owl_computation_optimiser/Make/index.html delete mode 100644 owl-base/Owl_computation_optimiser_sig/.dummy delete mode 100644 owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Linalg/index.html delete mode 100644 owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Mat/index.html delete mode 100644 owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Scalar/index.html delete mode 100644 owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/index.html delete mode 100644 owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/index.html delete mode 100644 
owl-base/Owl_computation_optimiser_sig/module-type-Sig/index.html delete mode 100644 owl-base/Owl_computation_shape/.dummy delete mode 100644 owl-base/Owl_computation_shape/Make/argument-1-Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_shape/Make/argument-1-Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_shape/Make/argument-1-Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_shape/Make/argument-1-Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_shape/Make/argument-1-Type/Device/index.html delete mode 100644 owl-base/Owl_computation_shape/Make/argument-1-Type/index.html delete mode 100644 owl-base/Owl_computation_shape/Make/index.html delete mode 100644 owl-base/Owl_computation_shape_sig/.dummy delete mode 100644 owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_shape_sig/module-type-Sig/Type/index.html delete mode 100644 owl-base/Owl_computation_shape_sig/module-type-Sig/index.html delete mode 100644 owl-base/Owl_computation_symbol/.dummy delete mode 100644 owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_symbol/Make/argument-1-Shape/index.html delete mode 100644 owl-base/Owl_computation_symbol/Make/index.html delete mode 100644 owl-base/Owl_computation_symbol_sig/.dummy delete mode 100644 owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/index.html delete mode 100644 owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/index.html delete mode 100644 owl-base/Owl_computation_symbol_sig/module-type-Sig/index.html delete mode 100644 owl-base/Owl_computation_type/.dummy delete mode 100644 owl-base/Owl_computation_type/Make/argument-1-Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_type/Make/argument-1-Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_type/Make/argument-1-Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_type/Make/argument-1-Device/A/index.html delete mode 100644 
owl-base/Owl_computation_type/Make/argument-1-Device/index.html delete mode 100644 owl-base/Owl_computation_type/Make/index.html delete mode 100644 owl-base/Owl_computation_type_sig/.dummy delete mode 100644 owl-base/Owl_computation_type_sig/module-type-Sig/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_computation_type_sig/module-type-Sig/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_computation_type_sig/module-type-Sig/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_computation_type_sig/module-type-Sig/Device/A/index.html delete mode 100644 owl-base/Owl_computation_type_sig/module-type-Sig/Device/index.html delete mode 100644 owl-base/Owl_computation_type_sig/module-type-Sig/index.html delete mode 100644 owl-base/Owl_const/.dummy delete mode 100644 owl-base/Owl_const/CGS/index.html delete mode 100644 owl-base/Owl_const/CGSM/index.html delete mode 100644 owl-base/Owl_const/MKS/index.html delete mode 100644 owl-base/Owl_const/Prefix/index.html delete mode 100644 owl-base/Owl_const/SI/index.html delete mode 100644 owl-base/Owl_countmin_sketch/.dummy delete mode 100644 owl-base/Owl_countmin_sketch/Make/argument-1-T/index.html delete mode 100644 owl-base/Owl_countmin_sketch/Make/index.html delete mode 100644 owl-base/Owl_countmin_sketch/Native/index.html delete mode 100644 owl-base/Owl_countmin_sketch/Owl/index.html delete mode 100644 owl-base/Owl_countmin_sketch_sig/.dummy delete mode 100644 owl-base/Owl_countmin_sketch_sig/module-type-Sig/index.html delete mode 100644 owl-base/Owl_countmin_table/.dummy delete mode 100644 owl-base/Owl_countmin_table/Native/index.html delete mode 100644 owl-base/Owl_countmin_table/Owl/index.html delete mode 100644 owl-base/Owl_countmin_table/module-type-Sig/index.html delete mode 100644 owl-base/Owl_dataframe/.dummy delete mode 100644 owl-base/Owl_exception/.dummy delete mode 100644 owl-base/Owl_graph/.dummy delete mode 100644 owl-base/Owl_heavyhitters_sketch/.dummy delete mode 100644 owl-base/Owl_heavyhitters_sketch/Make/argument-1-CM/index.html delete mode 100644 owl-base/Owl_heavyhitters_sketch/Make/index.html delete mode 100644 owl-base/Owl_heavyhitters_sketch/Native/index.html delete mode 100644 owl-base/Owl_heavyhitters_sketch/Owl/index.html delete mode 100644 owl-base/Owl_heavyhitters_sketch_sig/.dummy delete mode 100644 owl-base/Owl_heavyhitters_sketch_sig/module-type-Sig/index.html delete mode 100644 owl-base/Owl_io/.dummy delete mode 100644 owl-base/Owl_lazy/.dummy delete mode 100644 owl-base/Owl_lazy/Make/argument-1-A/Linalg/index.html delete mode 100644 owl-base/Owl_lazy/Make/argument-1-A/Mat/index.html delete mode 100644 owl-base/Owl_lazy/Make/argument-1-A/Scalar/index.html delete mode 100644 owl-base/Owl_lazy/Make/argument-1-A/index.html delete mode 100644 owl-base/Owl_lazy/Make/index.html delete mode 100644 owl-base/Owl_log/.dummy delete mode 100644 owl-base/Owl_maths_interpolate/.dummy delete mode 100644 owl-base/Owl_maths_quadrature/.dummy delete mode 100644 owl-base/Owl_maths_root/.dummy delete mode 100644 owl-base/Owl_neural_compiler/.dummy delete mode 100644 owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Linalg/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Mat/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Scalar/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 
owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Engine/Graph/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Engine/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Activation/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Add/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/AlphaDropout/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Average/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/AvgPool1D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/AvgPool2D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Concatenate/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Conv1D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Conv2D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Conv3D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/DilatedConv1D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/DilatedConv2D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/DilatedConv3D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Dot/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Dropout/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Embedding/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Flatten/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/FullyConnected/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/GRU/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/GaussianDropout/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/GaussianNoise/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/GlobalAvgPool1D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/GlobalAvgPool2D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/GlobalMaxPool1D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/GlobalMaxPool2D/index.html delete mode 100644 
owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Init/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Input/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/LSTM/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Lambda/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/LambdaArray/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Linear/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/LinearNoBias/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Masking/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Max/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/MaxPool1D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/MaxPool2D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Mul/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Normalisation/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/A/Linalg/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/A/Mat/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/A/Scalar/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/A/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/Arr/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/Builder/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/Linalg/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/Mat/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/Maths/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/NN/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Batch/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Checkpoint/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Clipping/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Gradient/index.html delete mode 100644 
owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Learning_Rate/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Loss/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Momentum/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Params/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Regularisation/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Stopping/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Utils/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Padding1D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Padding2D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Padding3D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Recurrent/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Reshape/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Slice/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/TransposeConv1D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/TransposeConv2D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/TransposeConv3D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/UpSampling1D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/UpSampling2D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/UpSampling3D/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/Graph/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/Neural/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Linalg/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Mat/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Scalar/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/index.html delete mode 100644 
owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/argument-1-E/index.html delete mode 100644 owl-base/Owl_neural_compiler/Make/index.html delete mode 100644 owl-base/Owl_neural_generic/.dummy delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Activation/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Add/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/AlphaDropout/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Average/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/AvgPool1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/AvgPool2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Concatenate/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Conv1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Conv2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Conv3D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/DilatedConv1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/DilatedConv2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/DilatedConv3D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Dot/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Dropout/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Embedding/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Flatten/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/FullyConnected/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GRU/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GaussianDropout/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GaussianNoise/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GlobalAvgPool1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GlobalAvgPool2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GlobalMaxPool1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GlobalMaxPool2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Init/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Input/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/LSTM/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Lambda/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/LambdaArray/index.html delete mode 100644 
owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Linear/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/LinearNoBias/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Masking/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Max/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/MaxPool1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/MaxPool2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Mul/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Normalisation/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/A/Linalg/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/A/Mat/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/A/Scalar/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/A/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Arr/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Linalg/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Mat/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Maths/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/NN/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Batch/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Checkpoint/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Clipping/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Gradient/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Learning_Rate/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Loss/index.html delete mode 100644 
owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Momentum/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Params/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Regularisation/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Stopping/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Utils/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Padding1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Padding2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Padding3D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Recurrent/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Reshape/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Slice/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/TransposeConv1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/TransposeConv2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/TransposeConv3D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/UpSampling1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/UpSampling2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/UpSampling3D/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/argument-1-Graph/index.html delete mode 100644 owl-base/Owl_neural_generic/Flatten/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Activation/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Add/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/AlphaDropout/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Average/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/AvgPool1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/AvgPool2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Concatenate/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Conv1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Conv2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Conv3D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/DilatedConv1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/DilatedConv2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/DilatedConv3D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Dot/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Dropout/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Embedding/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Flatten/index.html delete mode 100644 
owl-base/Owl_neural_generic/Make/Graph/Neuron/FullyConnected/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/GRU/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/GaussianDropout/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/GaussianNoise/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/GlobalAvgPool1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/GlobalAvgPool2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/GlobalMaxPool1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/GlobalMaxPool2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Init/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Input/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/LSTM/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Lambda/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/LambdaArray/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Linear/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/LinearNoBias/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Masking/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Max/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/MaxPool1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/MaxPool2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Mul/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Normalisation/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/A/Linalg/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/A/Mat/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/A/Scalar/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/A/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Arr/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Linalg/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Mat/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Maths/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/NN/index.html delete mode 100644 
owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Batch/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Checkpoint/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Clipping/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Gradient/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Learning_Rate/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Loss/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Momentum/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Params/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Regularisation/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Stopping/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Utils/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Padding1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Padding2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Padding3D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Recurrent/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Reshape/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/Slice/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/TransposeConv1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/TransposeConv2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/TransposeConv3D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/UpSampling1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/UpSampling2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/UpSampling3D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/Neuron/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/Graph/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/argument-1-A/Linalg/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/argument-1-A/Mat/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/argument-1-A/Scalar/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/argument-1-A/index.html delete mode 100644 owl-base/Owl_neural_generic/Make/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Activation/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Add/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/AlphaDropout/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Average/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/AvgPool1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/AvgPool2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Concatenate/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Conv1D/index.html delete mode 100644 
owl-base/Owl_neural_generic/Make_Embedded/Neuron/Conv2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Conv3D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/DilatedConv1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/DilatedConv2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/DilatedConv3D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Dot/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Dropout/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Embedding/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Flatten/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/FullyConnected/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/GRU/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/GaussianDropout/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/GaussianNoise/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/GlobalAvgPool1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/GlobalAvgPool2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/GlobalMaxPool1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/GlobalMaxPool2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Init/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Input/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/LSTM/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Lambda/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/LambdaArray/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Linear/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/LinearNoBias/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Masking/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Max/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/MaxPool1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/MaxPool2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Mul/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Normalisation/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/A/Linalg/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/A/Mat/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/A/Scalar/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/A/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Arr/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html delete mode 100644 
owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Linalg/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Mat/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Maths/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/NN/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Batch/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Checkpoint/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Clipping/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Gradient/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Learning_Rate/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Loss/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Momentum/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Params/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Regularisation/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Stopping/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Utils/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Padding1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Padding2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Padding3D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Recurrent/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Reshape/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/Slice/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/TransposeConv1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/TransposeConv2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/TransposeConv3D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/UpSampling1D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/UpSampling2D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/UpSampling3D/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/Neuron/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/argument-1-A/Linalg/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/argument-1-A/Mat/index.html delete mode 100644 
owl-base/Owl_neural_generic/Make_Embedded/argument-1-A/Scalar/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/argument-1-A/index.html delete mode 100644 owl-base/Owl_neural_generic/Make_Embedded/index.html delete mode 100644 owl-base/Owl_neural_graph/.dummy delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Activation/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Add/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/AlphaDropout/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Average/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/AvgPool1D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/AvgPool2D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Concatenate/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Conv1D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Conv2D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Conv3D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/DilatedConv1D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/DilatedConv2D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/DilatedConv3D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Dot/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Dropout/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Embedding/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Flatten/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/FullyConnected/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/GRU/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/GaussianDropout/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/GaussianNoise/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/GlobalAvgPool1D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/GlobalAvgPool2D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/GlobalMaxPool1D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/GlobalMaxPool2D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Init/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Input/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/LSTM/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Lambda/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/LambdaArray/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Linear/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/LinearNoBias/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Masking/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Max/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/MaxPool1D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/MaxPool2D/index.html delete mode 100644 
owl-base/Owl_neural_graph/Make/argument-1-Neuron/Mul/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Normalisation/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/A/Linalg/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/A/Mat/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/A/Scalar/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/A/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Arr/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Linalg/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Mat/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Maths/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/NN/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Batch/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Checkpoint/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Clipping/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Gradient/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Learning_Rate/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Loss/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Momentum/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Params/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Regularisation/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Stopping/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Utils/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Padding1D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Padding2D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Padding3D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Recurrent/index.html delete mode 100644 
owl-base/Owl_neural_graph/Make/argument-1-Neuron/Reshape/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/Slice/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/TransposeConv1D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/TransposeConv2D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/TransposeConv3D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/UpSampling1D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/UpSampling2D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/UpSampling3D/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/argument-1-Neuron/index.html delete mode 100644 owl-base/Owl_neural_graph/Make/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/.dummy delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Activation/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Add/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/AlphaDropout/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Average/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/AvgPool1D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/AvgPool2D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Concatenate/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Conv1D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Conv2D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Conv3D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/DilatedConv1D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/DilatedConv2D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/DilatedConv3D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Dot/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Dropout/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Embedding/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Flatten/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/FullyConnected/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GRU/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GaussianDropout/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GaussianNoise/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GlobalAvgPool1D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GlobalAvgPool2D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GlobalMaxPool1D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GlobalMaxPool2D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Init/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Input/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/LSTM/index.html delete mode 100644 
owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Lambda/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/LambdaArray/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Linear/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/LinearNoBias/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Masking/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Max/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/MaxPool1D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/MaxPool2D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Mul/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Normalisation/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/A/Linalg/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/A/Mat/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/A/Scalar/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/A/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Arr/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Linalg/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Mat/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Maths/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/NN/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Batch/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Checkpoint/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Clipping/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Gradient/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Learning_Rate/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Loss/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Momentum/index.html delete mode 100644 
owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Params/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Regularisation/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Stopping/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Utils/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Padding1D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Padding2D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Padding3D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Recurrent/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Reshape/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Slice/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/TransposeConv1D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/TransposeConv2D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/TransposeConv3D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/UpSampling1D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/UpSampling2D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/UpSampling3D/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/index.html delete mode 100644 owl-base/Owl_neural_graph_sig/module-type-Sig/index.html delete mode 100644 owl-base/Owl_neural_neuron/.dummy delete mode 100644 owl-base/Owl_neural_neuron/Make/Activation/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Add/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/AlphaDropout/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Average/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/AvgPool1D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/AvgPool2D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Concatenate/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Conv1D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Conv2D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Conv3D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/DilatedConv1D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/DilatedConv2D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/DilatedConv3D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Dot/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Dropout/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Embedding/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Flatten/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/FullyConnected/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/GRU/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/GaussianDropout/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/GaussianNoise/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/GlobalAvgPool1D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/GlobalAvgPool2D/index.html delete mode 100644 
owl-base/Owl_neural_neuron/Make/GlobalMaxPool1D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/GlobalMaxPool2D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Init/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Input/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/LSTM/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Lambda/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/LambdaArray/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Linear/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/LinearNoBias/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Masking/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Max/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/MaxPool1D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/MaxPool2D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Mul/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Normalisation/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Padding1D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Padding2D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Padding3D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Recurrent/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Reshape/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/Slice/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/TransposeConv1D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/TransposeConv2D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/TransposeConv3D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/UpSampling1D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/UpSampling2D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/UpSampling3D/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/A/Linalg/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/A/Mat/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/A/Scalar/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/A/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Arr/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Piso/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Linalg/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Mat/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Maths/index.html delete mode 100644 
owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/NN/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Batch/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Checkpoint/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Clipping/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Gradient/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Learning_Rate/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Loss/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Momentum/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Params/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Regularisation/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Stopping/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Utils/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/argument-1-Optimise/index.html delete mode 100644 owl-base/Owl_neural_neuron/Make/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/.dummy delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Activation/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Add/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/AlphaDropout/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Average/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/AvgPool1D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/AvgPool2D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Concatenate/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Conv1D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Conv2D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Conv3D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/DilatedConv1D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/DilatedConv2D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/DilatedConv3D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Dot/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Dropout/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Embedding/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Flatten/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/FullyConnected/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/GRU/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/GaussianDropout/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/GaussianNoise/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/GlobalAvgPool1D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/GlobalAvgPool2D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/GlobalMaxPool1D/index.html delete mode 100644 
owl-base/Owl_neural_neuron_sig/module-type-Sig/GlobalMaxPool2D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Init/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Input/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/LSTM/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Lambda/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/LambdaArray/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Linear/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/LinearNoBias/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Masking/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Max/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/MaxPool1D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/MaxPool2D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Mul/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Normalisation/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/A/Linalg/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/A/Mat/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/A/Scalar/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/A/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Arr/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Piso/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Linalg/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Mat/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Maths/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/NN/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Batch/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Checkpoint/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Clipping/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Gradient/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Learning_Rate/index.html delete mode 100644 
owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Loss/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Momentum/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Params/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Regularisation/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Stopping/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Utils/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Padding1D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Padding2D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Padding3D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Recurrent/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Reshape/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/Slice/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/TransposeConv1D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/TransposeConv2D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/TransposeConv3D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/UpSampling1D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/UpSampling2D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/UpSampling3D/index.html delete mode 100644 owl-base/Owl_neural_neuron_sig/module-type-Sig/index.html delete mode 100644 owl-base/Owl_numdiff_generic/.dummy delete mode 100644 owl-base/Owl_numdiff_generic/Make/argument-1-A/index.html delete mode 100644 owl-base/Owl_numdiff_generic/Make/index.html delete mode 100644 owl-base/Owl_numdiff_generic_sig/.dummy delete mode 100644 owl-base/Owl_numdiff_generic_sig/Impl/argument-1-A/index.html delete mode 100644 owl-base/Owl_numdiff_generic_sig/Impl/index.html delete mode 100644 owl-base/Owl_numdiff_generic_sig/module-type-Sig/index.html delete mode 100644 owl-base/Owl_operator/.dummy delete mode 100644 owl-base/Owl_operator/Make_Basic/argument-1-M/index.html delete mode 100644 owl-base/Owl_operator/Make_Basic/index.html delete mode 100644 owl-base/Owl_operator/Make_Extend/argument-1-M/index.html delete mode 100644 owl-base/Owl_operator/Make_Extend/index.html delete mode 100644 owl-base/Owl_operator/Make_Linalg/argument-1-M/index.html delete mode 100644 owl-base/Owl_operator/Make_Linalg/index.html delete mode 100644 owl-base/Owl_operator/Make_Matrix/argument-1-M/index.html delete mode 100644 owl-base/Owl_operator/Make_Matrix/index.html delete mode 100644 owl-base/Owl_operator/Make_Ndarray/argument-1-M/index.html delete mode 100644 owl-base/Owl_operator/Make_Ndarray/index.html delete mode 100644 owl-base/Owl_optimise_generic/.dummy delete mode 100644 owl-base/Owl_optimise_generic/Make/Batch/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/Checkpoint/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/Clipping/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/Gradient/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/Learning_Rate/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/Loss/index.html delete mode 100644 
owl-base/Owl_optimise_generic/Make/Momentum/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/Params/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/Regularisation/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/Stopping/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/Utils/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/A/Linalg/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/A/Mat/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/A/Scalar/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/A/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Arr/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Piso/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Linalg/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Mat/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Maths/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/NN/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/index.html delete mode 100644 owl-base/Owl_optimise_generic/Make/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/.dummy delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/A/Linalg/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/A/Mat/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/A/Scalar/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/A/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Arr/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Piso/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Linalg/index.html delete mode 100644 
owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Mat/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Maths/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/NN/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Batch/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Checkpoint/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Clipping/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Gradient/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Learning_Rate/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Loss/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Momentum/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Params/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Regularisation/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Stopping/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/Utils/index.html delete mode 100644 owl-base/Owl_optimise_generic_sig/module-type-Sig/index.html delete mode 100644 owl-base/Owl_pretty/.dummy delete mode 100644 owl-base/Owl_types/.dummy delete mode 100644 owl-base/Owl_types/module-type-Computation_Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_types/module-type-Computation_Device/A/Mat/index.html delete mode 100644 owl-base/Owl_types/module-type-Computation_Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_types/module-type-Computation_Device/A/index.html delete mode 100644 owl-base/Owl_types/module-type-Computation_Device/index.html delete mode 100644 owl-base/Owl_types/module-type-Ndarray_Algodiff/Linalg/index.html delete mode 100644 owl-base/Owl_types/module-type-Ndarray_Algodiff/Mat/index.html delete mode 100644 owl-base/Owl_types/module-type-Ndarray_Algodiff/Scalar/index.html delete mode 100644 owl-base/Owl_types/module-type-Ndarray_Algodiff/index.html delete mode 100644 owl-base/Owl_types/module-type-Ndarray_Basic/index.html delete mode 100644 owl-base/Owl_types/module-type-Ndarray_Compare/index.html delete mode 100644 owl-base/Owl_types/module-type-Ndarray_Mutable/Linalg/index.html delete mode 100644 owl-base/Owl_types/module-type-Ndarray_Mutable/Mat/index.html delete mode 100644 owl-base/Owl_types/module-type-Ndarray_Mutable/Scalar/index.html delete mode 100644 owl-base/Owl_types/module-type-Ndarray_Mutable/index.html delete mode 100644 owl-base/Owl_types/module-type-Ndarray_Numdiff/index.html delete mode 100644 owl-base/Owl_types/module-type-Stats_Dist/Linalg/index.html delete mode 100644 owl-base/Owl_types/module-type-Stats_Dist/Mat/index.html delete mode 100644 owl-base/Owl_types/module-type-Stats_Dist/Scalar/index.html delete mode 100644 owl-base/Owl_types/module-type-Stats_Dist/index.html delete mode 100644 owl-base/Owl_types_common/.dummy delete mode 100644 owl-base/Owl_types_computation_device/.dummy delete mode 100644 owl-base/Owl_types_computation_device/module-type-Sig/A/Linalg/index.html delete mode 100644 owl-base/Owl_types_computation_device/module-type-Sig/A/Mat/index.html delete mode 100644 owl-base/Owl_types_computation_device/module-type-Sig/A/Scalar/index.html delete mode 100644 
owl-base/Owl_types_computation_device/module-type-Sig/A/index.html delete mode 100644 owl-base/Owl_types_computation_device/module-type-Sig/index.html delete mode 100644 owl-base/Owl_types_computation_engine/.dummy delete mode 100644 owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/Linalg/index.html delete mode 100644 owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/Mat/index.html delete mode 100644 owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/Scalar/index.html delete mode 100644 owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html delete mode 100644 owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html delete mode 100644 owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html delete mode 100644 owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html delete mode 100644 owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html delete mode 100644 owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html delete mode 100644 owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/Symbol/Shape/index.html delete mode 100644 owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/Symbol/index.html delete mode 100644 owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/index.html delete mode 100644 owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/index.html delete mode 100644 owl-base/Owl_types_computation_engine/module-type-Sig/Graph/index.html delete mode 100644 owl-base/Owl_types_computation_engine/module-type-Sig/index.html delete mode 100644 owl-base/Owl_types_maths_basic/.dummy delete mode 100644 owl-base/Owl_types_maths_basic/module-type-Sig/index.html delete mode 100644 owl-base/Owl_types_ndarray_algodiff/.dummy delete mode 100644 owl-base/Owl_types_ndarray_algodiff/module-type-Sig/Linalg/index.html delete mode 100644 owl-base/Owl_types_ndarray_algodiff/module-type-Sig/Mat/index.html delete mode 100644 owl-base/Owl_types_ndarray_algodiff/module-type-Sig/Scalar/index.html delete mode 100644 owl-base/Owl_types_ndarray_algodiff/module-type-Sig/index.html delete mode 100644 owl-base/Owl_types_ndarray_basic/.dummy delete mode 100644 owl-base/Owl_types_ndarray_basic/module-type-Sig/index.html delete mode 100644 owl-base/Owl_types_ndarray_compare/.dummy delete mode 100644 owl-base/Owl_types_ndarray_compare/module-type-Sig/index.html delete mode 100644 owl-base/Owl_types_ndarray_eltcmp/.dummy delete mode 100644 owl-base/Owl_types_ndarray_eltcmp/module-type-Sig/index.html delete mode 100644 owl-base/Owl_types_ndarray_mutable/.dummy delete mode 100644 owl-base/Owl_types_ndarray_mutable/module-type-Sig/Linalg/index.html delete mode 100644 owl-base/Owl_types_ndarray_mutable/module-type-Sig/Mat/index.html delete mode 100644 owl-base/Owl_types_ndarray_mutable/module-type-Sig/Scalar/index.html delete mode 100644 owl-base/Owl_types_ndarray_mutable/module-type-Sig/index.html delete mode 100644 owl-base/Owl_types_ndarray_numdiff/.dummy delete mode 100644 owl-base/Owl_types_ndarray_numdiff/module-type-Sig/index.html delete mode 100644 
owl-base/Owl_types_operator/.dummy delete mode 100644 owl-base/Owl_types_operator/module-type-BasicSig/index.html delete mode 100644 owl-base/Owl_types_operator/module-type-ExtendSig/index.html delete mode 100644 owl-base/Owl_types_operator/module-type-LinalgSig/index.html delete mode 100644 owl-base/Owl_types_operator/module-type-MatrixSig/index.html delete mode 100644 owl-base/Owl_types_operator/module-type-NdarraySig/index.html delete mode 100644 owl-base/Owl_types_stats_basic/.dummy delete mode 100644 owl-base/Owl_types_stats_dist/.dummy delete mode 100644 owl-base/Owl_types_stats_dist/module-type-Sig/Linalg/index.html delete mode 100644 owl-base/Owl_types_stats_dist/module-type-Sig/Mat/index.html delete mode 100644 owl-base/Owl_types_stats_dist/module-type-Sig/Scalar/index.html delete mode 100644 owl-base/Owl_types_stats_dist/module-type-Sig/index.html delete mode 100644 owl-base/Owl_utils/.dummy delete mode 100644 owl-base/Owl_utils_array/.dummy delete mode 100644 owl-base/Owl_utils_heap/.dummy delete mode 100644 owl-base/Owl_utils_infer_shape/.dummy delete mode 100644 owl-base/Owl_utils_multimap/.dummy delete mode 100644 owl-base/Owl_utils_multimap/Make/index.html delete mode 100644 owl-base/Owl_utils_ndarray/.dummy delete mode 100644 owl-base/Owl_utils_stack/.dummy delete mode 100644 owl-base/Owl_view/.dummy delete mode 100644 owl-base/Owl_view/Make/argument-1-A/index.html delete mode 100644 owl-base/Owl_view/Make/index.html delete mode 100644 owl-top/Owl_top/.dummy delete mode 100644 owl/Owl/.dummy delete mode 100644 owl/Owl/Arr/index.html delete mode 100644 owl/Owl/Mat/index.html delete mode 100644 owl/Owl_algodiff/.dummy delete mode 100644 owl/Owl_algodiff/D/A/Linalg/index.html delete mode 100644 owl/Owl_algodiff/D/A/Mat/index.html delete mode 100644 owl/Owl_algodiff/D/A/Scalar/index.html delete mode 100644 owl/Owl_algodiff/D/A/index.html delete mode 100644 owl/Owl_algodiff/D/Arr/index.html delete mode 100644 owl/Owl_algodiff/D/Builder/index.html delete mode 100644 owl/Owl_algodiff/D/Builder/module-type-Aiso/index.html delete mode 100644 owl/Owl_algodiff/D/Builder/module-type-Piso/index.html delete mode 100644 owl/Owl_algodiff/D/Builder/module-type-Siao/index.html delete mode 100644 owl/Owl_algodiff/D/Builder/module-type-Sipo/index.html delete mode 100644 owl/Owl_algodiff/D/Builder/module-type-Siso/index.html delete mode 100644 owl/Owl_algodiff/D/Builder/module-type-Sito/index.html delete mode 100644 owl/Owl_algodiff/D/Linalg/index.html delete mode 100644 owl/Owl_algodiff/D/Mat/index.html delete mode 100644 owl/Owl_algodiff/D/Maths/index.html delete mode 100644 owl/Owl_algodiff/D/NN/index.html delete mode 100644 owl/Owl_algodiff/D/index.html delete mode 100644 owl/Owl_algodiff/S/A/Linalg/index.html delete mode 100644 owl/Owl_algodiff/S/A/Mat/index.html delete mode 100644 owl/Owl_algodiff/S/A/Scalar/index.html delete mode 100644 owl/Owl_algodiff/S/A/index.html delete mode 100644 owl/Owl_algodiff/S/Arr/index.html delete mode 100644 owl/Owl_algodiff/S/Builder/index.html delete mode 100644 owl/Owl_algodiff/S/Builder/module-type-Aiso/index.html delete mode 100644 owl/Owl_algodiff/S/Builder/module-type-Piso/index.html delete mode 100644 owl/Owl_algodiff/S/Builder/module-type-Siao/index.html delete mode 100644 owl/Owl_algodiff/S/Builder/module-type-Sipo/index.html delete mode 100644 owl/Owl_algodiff/S/Builder/module-type-Siso/index.html delete mode 100644 owl/Owl_algodiff/S/Builder/module-type-Sito/index.html delete mode 100644 owl/Owl_algodiff/S/Linalg/index.html delete mode 100644 
owl/Owl_algodiff/S/Mat/index.html delete mode 100644 owl/Owl_algodiff/S/Maths/index.html delete mode 100644 owl/Owl_algodiff/S/NN/index.html delete mode 100644 owl/Owl_algodiff/S/index.html delete mode 100644 owl/Owl_algodiff_primal_ops/.dummy delete mode 100644 owl/Owl_algodiff_primal_ops/D/Linalg/index.html delete mode 100644 owl/Owl_algodiff_primal_ops/D/Mat/index.html delete mode 100644 owl/Owl_algodiff_primal_ops/D/index.html delete mode 100644 owl/Owl_algodiff_primal_ops/S/Linalg/index.html delete mode 100644 owl/Owl_algodiff_primal_ops/S/Mat/index.html delete mode 100644 owl/Owl_algodiff_primal_ops/S/index.html delete mode 100644 owl/Owl_cblas/.dummy delete mode 100644 owl/Owl_cblas_basic/.dummy delete mode 100644 owl/Owl_cblas_generated/.dummy delete mode 100644 owl/Owl_cluster/.dummy delete mode 100644 owl/Owl_core_types/.dummy delete mode 100644 owl/Owl_dataset/.dummy delete mode 100644 owl/Owl_dense/.dummy delete mode 100644 owl/Owl_dense_matrix/.dummy delete mode 100644 owl/Owl_dense_matrix/C/index.html delete mode 100644 owl/Owl_dense_matrix/D/index.html delete mode 100644 owl/Owl_dense_matrix/Generic/index.html delete mode 100644 owl/Owl_dense_matrix/Operator/index.html delete mode 100644 owl/Owl_dense_matrix/S/index.html delete mode 100644 owl/Owl_dense_matrix/Z/index.html delete mode 100644 owl/Owl_dense_matrix_c/.dummy delete mode 100644 owl/Owl_dense_matrix_d/.dummy delete mode 100644 owl/Owl_dense_matrix_generic/.dummy delete mode 100644 owl/Owl_dense_matrix_intf/.dummy delete mode 100644 owl/Owl_dense_matrix_intf/module-type-Common/index.html delete mode 100644 owl/Owl_dense_matrix_intf/module-type-Complex/index.html delete mode 100644 owl/Owl_dense_matrix_intf/module-type-Real/index.html delete mode 100644 owl/Owl_dense_matrix_s/.dummy delete mode 100644 owl/Owl_dense_matrix_z/.dummy delete mode 100644 owl/Owl_dense_ndarray/.dummy delete mode 100644 owl/Owl_dense_ndarray/Any/index.html delete mode 100644 owl/Owl_dense_ndarray/C/index.html delete mode 100644 owl/Owl_dense_ndarray/D/index.html delete mode 100644 owl/Owl_dense_ndarray/Generic/index.html delete mode 100644 owl/Owl_dense_ndarray/Operator/index.html delete mode 100644 owl/Owl_dense_ndarray/S/index.html delete mode 100644 owl/Owl_dense_ndarray/Z/index.html delete mode 100644 owl/Owl_dense_ndarray_a/.dummy delete mode 100644 owl/Owl_dense_ndarray_c/.dummy delete mode 100644 owl/Owl_dense_ndarray_d/.dummy delete mode 100644 owl/Owl_dense_ndarray_generic/.dummy delete mode 100644 owl/Owl_dense_ndarray_intf/.dummy delete mode 100644 owl/Owl_dense_ndarray_intf/module-type-Common/index.html delete mode 100644 owl/Owl_dense_ndarray_intf/module-type-Complex/index.html delete mode 100644 owl/Owl_dense_ndarray_intf/module-type-Distribution/index.html delete mode 100644 owl/Owl_dense_ndarray_intf/module-type-NN/index.html delete mode 100644 owl/Owl_dense_ndarray_intf/module-type-Real/index.html delete mode 100644 owl/Owl_dense_ndarray_s/.dummy delete mode 100644 owl/Owl_dense_ndarray_z/.dummy delete mode 100644 owl/Owl_distribution/.dummy delete mode 100644 owl/Owl_distribution/Make/Beta/index.html delete mode 100644 owl/Owl_distribution/Make/Cauchy/index.html delete mode 100644 owl/Owl_distribution/Make/Chi2/index.html delete mode 100644 owl/Owl_distribution/Make/Exponential/index.html delete mode 100644 owl/Owl_distribution/Make/F/index.html delete mode 100644 owl/Owl_distribution/Make/Gamma/index.html delete mode 100644 owl/Owl_distribution/Make/Gaussian/index.html delete mode 100644 
owl/Owl_distribution/Make/Gumbel1/index.html delete mode 100644 owl/Owl_distribution/Make/Gumbel2/index.html delete mode 100644 owl/Owl_distribution/Make/Laplace/index.html delete mode 100644 owl/Owl_distribution/Make/Logistic/index.html delete mode 100644 owl/Owl_distribution/Make/Lognormal/index.html delete mode 100644 owl/Owl_distribution/Make/Lomax/index.html delete mode 100644 owl/Owl_distribution/Make/Poisson/index.html delete mode 100644 owl/Owl_distribution/Make/Rayleigh/index.html delete mode 100644 owl/Owl_distribution/Make/Uniform/index.html delete mode 100644 owl/Owl_distribution/Make/Weibull/index.html delete mode 100644 owl/Owl_distribution/Make/argument-1-A/Linalg/index.html delete mode 100644 owl/Owl_distribution/Make/argument-1-A/Mat/index.html delete mode 100644 owl/Owl_distribution/Make/argument-1-A/Scalar/index.html delete mode 100644 owl/Owl_distribution/Make/argument-1-A/index.html delete mode 100644 owl/Owl_distribution/Make/index.html delete mode 100644 owl/Owl_distribution_common/.dummy delete mode 100644 owl/Owl_distribution_generic/.dummy delete mode 100644 owl/Owl_fft/.dummy delete mode 100644 owl/Owl_fft/D/index.html delete mode 100644 owl/Owl_fft/Generic/index.html delete mode 100644 owl/Owl_fft/S/index.html delete mode 100644 owl/Owl_fft_d/.dummy delete mode 100644 owl/Owl_fft_generic/.dummy delete mode 100644 owl/Owl_fft_s/.dummy delete mode 100644 owl/Owl_fftpack/.dummy delete mode 100644 owl/Owl_lapacke/.dummy delete mode 100644 owl/Owl_lapacke_generated/.dummy delete mode 100644 owl/Owl_linalg/.dummy delete mode 100644 owl/Owl_linalg/C/index.html delete mode 100644 owl/Owl_linalg/D/index.html delete mode 100644 owl/Owl_linalg/Generic/index.html delete mode 100644 owl/Owl_linalg/S/index.html delete mode 100644 owl/Owl_linalg/Z/index.html delete mode 100644 owl/Owl_linalg_c/.dummy delete mode 100644 owl/Owl_linalg_d/.dummy delete mode 100644 owl/Owl_linalg_generic/.dummy delete mode 100644 owl/Owl_linalg_intf/.dummy delete mode 100644 owl/Owl_linalg_intf/module-type-Common/index.html delete mode 100644 owl/Owl_linalg_intf/module-type-Real/index.html delete mode 100644 owl/Owl_linalg_s/.dummy delete mode 100644 owl/Owl_linalg_z/.dummy delete mode 100644 owl/Owl_maths/.dummy delete mode 100644 owl/Owl_maths_special/.dummy delete mode 100644 owl/Owl_matrix/.dummy delete mode 100644 owl/Owl_matrix_check/.dummy delete mode 100644 owl/Owl_matrix_swap/.dummy delete mode 100644 owl/Owl_ndarray/.dummy delete mode 100644 owl/Owl_ndarray_contract/.dummy delete mode 100644 owl/Owl_ndarray_conv/.dummy delete mode 100644 owl/Owl_ndarray_fma/.dummy delete mode 100644 owl/Owl_ndarray_maths/.dummy delete mode 100644 owl/Owl_ndarray_pool/.dummy delete mode 100644 owl/Owl_ndarray_repeat/.dummy delete mode 100644 owl/Owl_ndarray_slide/.dummy delete mode 100644 owl/Owl_ndarray_sort/.dummy delete mode 100644 owl/Owl_ndarray_transpose/.dummy delete mode 100644 owl/Owl_ndarray_upsampling/.dummy delete mode 100644 owl/Owl_ndarray_utils/.dummy delete mode 100644 owl/Owl_neural/.dummy delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Activation/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Add/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/AlphaDropout/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Average/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/AvgPool1D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/AvgPool2D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Concatenate/index.html delete mode 100644 
owl/Owl_neural/D/Graph/Neuron/Conv1D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Conv2D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Conv3D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/DilatedConv1D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/DilatedConv2D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/DilatedConv3D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Dot/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Dropout/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Embedding/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Flatten/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/FullyConnected/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/GRU/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/GaussianDropout/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/GaussianNoise/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/GlobalAvgPool1D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/GlobalAvgPool2D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/GlobalMaxPool1D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/GlobalMaxPool2D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Init/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Input/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/LSTM/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Lambda/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/LambdaArray/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Linear/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/LinearNoBias/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Masking/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Max/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/MaxPool1D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/MaxPool2D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Mul/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Normalisation/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/A/Linalg/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/A/Mat/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/A/Scalar/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/A/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Arr/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Linalg/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Mat/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Maths/index.html delete mode 100644 
owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/NN/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Batch/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Checkpoint/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Clipping/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Gradient/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Learning_Rate/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Loss/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Momentum/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Params/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Regularisation/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Stopping/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/Utils/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Optimise/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Padding1D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Padding2D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Padding3D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Recurrent/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Reshape/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/Slice/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/TransposeConv1D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/TransposeConv2D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/TransposeConv3D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/UpSampling1D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/UpSampling2D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/UpSampling3D/index.html delete mode 100644 owl/Owl_neural/D/Graph/Neuron/index.html delete mode 100644 owl/Owl_neural/D/Graph/index.html delete mode 100644 owl/Owl_neural/D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Activation/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Add/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/AlphaDropout/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Average/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/AvgPool1D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/AvgPool2D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Concatenate/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Conv1D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Conv2D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Conv3D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/DilatedConv1D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/DilatedConv2D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/DilatedConv3D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Dot/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Dropout/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Embedding/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Flatten/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/FullyConnected/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/GRU/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/GaussianDropout/index.html delete mode 100644 
owl/Owl_neural/S/Graph/Neuron/GaussianNoise/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/GlobalAvgPool1D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/GlobalAvgPool2D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/GlobalMaxPool1D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/GlobalMaxPool2D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Init/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Input/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/LSTM/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Lambda/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/LambdaArray/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Linear/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/LinearNoBias/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Masking/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Max/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/MaxPool1D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/MaxPool2D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Mul/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Normalisation/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/A/Linalg/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/A/Mat/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/A/Scalar/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/A/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Arr/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Linalg/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Mat/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Maths/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/NN/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Batch/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Checkpoint/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Clipping/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Gradient/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Learning_Rate/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Loss/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Momentum/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Params/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Regularisation/index.html delete mode 100644 
owl/Owl_neural/S/Graph/Neuron/Optimise/Stopping/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/Utils/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Optimise/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Padding1D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Padding2D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Padding3D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Recurrent/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Reshape/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/Slice/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/TransposeConv1D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/TransposeConv2D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/TransposeConv3D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/UpSampling1D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/UpSampling2D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/UpSampling3D/index.html delete mode 100644 owl/Owl_neural/S/Graph/Neuron/index.html delete mode 100644 owl/Owl_neural/S/Graph/index.html delete mode 100644 owl/Owl_neural/S/index.html delete mode 100644 owl/Owl_neural_parallel/.dummy delete mode 100644 owl/Owl_neural_parallel/Make/argument-1-M/index.html delete mode 100644 owl/Owl_neural_parallel/Make/argument-2-E/index.html delete mode 100644 owl/Owl_neural_parallel/Make/index.html delete mode 100644 owl/Owl_neural_parallel/module-type-EngineSig/index.html delete mode 100644 owl/Owl_neural_parallel/module-type-ModelSig/index.html delete mode 100644 owl/Owl_nlp/.dummy delete mode 100644 owl/Owl_nlp_corpus/.dummy delete mode 100644 owl/Owl_nlp_lda/.dummy delete mode 100644 owl/Owl_nlp_similarity/.dummy delete mode 100644 owl/Owl_nlp_tfidf/.dummy delete mode 100644 owl/Owl_nlp_utils/.dummy delete mode 100644 owl/Owl_nlp_vocabulary/.dummy delete mode 100644 owl/Owl_optimise/.dummy delete mode 100644 owl/Owl_optimise/D/Algodiff/A/Linalg/index.html delete mode 100644 owl/Owl_optimise/D/Algodiff/A/Mat/index.html delete mode 100644 owl/Owl_optimise/D/Algodiff/A/Scalar/index.html delete mode 100644 owl/Owl_optimise/D/Algodiff/A/index.html delete mode 100644 owl/Owl_optimise/D/Algodiff/Arr/index.html delete mode 100644 owl/Owl_optimise/D/Algodiff/Builder/index.html delete mode 100644 owl/Owl_optimise/D/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl/Owl_optimise/D/Algodiff/Builder/module-type-Piso/index.html delete mode 100644 owl/Owl_optimise/D/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl/Owl_optimise/D/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl/Owl_optimise/D/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl/Owl_optimise/D/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl/Owl_optimise/D/Algodiff/Linalg/index.html delete mode 100644 owl/Owl_optimise/D/Algodiff/Mat/index.html delete mode 100644 owl/Owl_optimise/D/Algodiff/Maths/index.html delete mode 100644 owl/Owl_optimise/D/Algodiff/NN/index.html delete mode 100644 owl/Owl_optimise/D/Algodiff/index.html delete mode 100644 owl/Owl_optimise/D/Batch/index.html delete mode 100644 owl/Owl_optimise/D/Checkpoint/index.html delete mode 100644 owl/Owl_optimise/D/Clipping/index.html delete mode 100644 owl/Owl_optimise/D/Gradient/index.html delete mode 100644 owl/Owl_optimise/D/Learning_Rate/index.html delete mode 100644 owl/Owl_optimise/D/Loss/index.html delete mode 100644 
owl/Owl_optimise/D/Momentum/index.html delete mode 100644 owl/Owl_optimise/D/Params/index.html delete mode 100644 owl/Owl_optimise/D/Regularisation/index.html delete mode 100644 owl/Owl_optimise/D/Stopping/index.html delete mode 100644 owl/Owl_optimise/D/Utils/index.html delete mode 100644 owl/Owl_optimise/D/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Algodiff/A/Linalg/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Algodiff/A/Mat/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Algodiff/A/Scalar/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Algodiff/A/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Algodiff/Arr/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Algodiff/Builder/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Piso/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Algodiff/Linalg/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Algodiff/Mat/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Algodiff/Maths/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Algodiff/NN/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Algodiff/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Batch/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Checkpoint/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Clipping/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Gradient/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Learning_Rate/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Loss/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Momentum/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Params/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Regularisation/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Stopping/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/Utils/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/argument-1-A/Linalg/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/argument-1-A/Mat/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/argument-1-A/Scalar/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/argument-1-A/index.html delete mode 100644 owl/Owl_optimise/Make_Embedded/index.html delete mode 100644 owl/Owl_optimise/S/Algodiff/A/Linalg/index.html delete mode 100644 owl/Owl_optimise/S/Algodiff/A/Mat/index.html delete mode 100644 owl/Owl_optimise/S/Algodiff/A/Scalar/index.html delete mode 100644 owl/Owl_optimise/S/Algodiff/A/index.html delete mode 100644 owl/Owl_optimise/S/Algodiff/Arr/index.html delete mode 100644 owl/Owl_optimise/S/Algodiff/Builder/index.html delete mode 100644 owl/Owl_optimise/S/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl/Owl_optimise/S/Algodiff/Builder/module-type-Piso/index.html delete mode 100644 owl/Owl_optimise/S/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 
owl/Owl_optimise/S/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl/Owl_optimise/S/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl/Owl_optimise/S/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl/Owl_optimise/S/Algodiff/Linalg/index.html delete mode 100644 owl/Owl_optimise/S/Algodiff/Mat/index.html delete mode 100644 owl/Owl_optimise/S/Algodiff/Maths/index.html delete mode 100644 owl/Owl_optimise/S/Algodiff/NN/index.html delete mode 100644 owl/Owl_optimise/S/Algodiff/index.html delete mode 100644 owl/Owl_optimise/S/Batch/index.html delete mode 100644 owl/Owl_optimise/S/Checkpoint/index.html delete mode 100644 owl/Owl_optimise/S/Clipping/index.html delete mode 100644 owl/Owl_optimise/S/Gradient/index.html delete mode 100644 owl/Owl_optimise/S/Learning_Rate/index.html delete mode 100644 owl/Owl_optimise/S/Loss/index.html delete mode 100644 owl/Owl_optimise/S/Momentum/index.html delete mode 100644 owl/Owl_optimise/S/Params/index.html delete mode 100644 owl/Owl_optimise/S/Regularisation/index.html delete mode 100644 owl/Owl_optimise/S/Stopping/index.html delete mode 100644 owl/Owl_optimise/S/Utils/index.html delete mode 100644 owl/Owl_optimise/S/index.html delete mode 100644 owl/Owl_regression/.dummy delete mode 100644 owl/Owl_regression/D/Optimise/Algodiff/A/Linalg/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Algodiff/A/Mat/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Algodiff/A/Scalar/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Algodiff/A/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Algodiff/Arr/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Algodiff/Builder/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Piso/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Algodiff/Linalg/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Algodiff/Mat/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Algodiff/Maths/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Algodiff/NN/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Algodiff/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Batch/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Checkpoint/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Clipping/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Gradient/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Learning_Rate/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Loss/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Momentum/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Params/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Regularisation/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Stopping/index.html delete mode 100644 owl/Owl_regression/D/Optimise/Utils/index.html delete mode 100644 owl/Owl_regression/D/Optimise/index.html delete mode 100644 owl/Owl_regression/D/index.html delete mode 100644 
owl/Owl_regression/Make_Embedded/Optimise/Algodiff/A/Linalg/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Algodiff/A/Mat/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Algodiff/A/Scalar/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Algodiff/A/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Arr/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Piso/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Linalg/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Mat/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Maths/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Algodiff/NN/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Algodiff/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Batch/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Checkpoint/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Clipping/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Gradient/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Learning_Rate/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Loss/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Momentum/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Params/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Regularisation/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Stopping/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/Utils/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/Optimise/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/argument-1-A/Linalg/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/argument-1-A/Mat/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/argument-1-A/Scalar/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/argument-1-A/index.html delete mode 100644 owl/Owl_regression/Make_Embedded/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Algodiff/A/Linalg/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Algodiff/A/Mat/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Algodiff/A/Scalar/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Algodiff/A/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Algodiff/Arr/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Algodiff/Builder/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Piso/index.html 
delete mode 100644 owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Algodiff/Linalg/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Algodiff/Mat/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Algodiff/Maths/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Algodiff/NN/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Algodiff/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Batch/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Checkpoint/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Clipping/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Gradient/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Learning_Rate/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Loss/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Momentum/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Params/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Regularisation/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Stopping/index.html delete mode 100644 owl/Owl_regression/S/Optimise/Utils/index.html delete mode 100644 owl/Owl_regression/S/Optimise/index.html delete mode 100644 owl/Owl_regression/S/index.html delete mode 100644 owl/Owl_regression_generic/.dummy delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/A/Linalg/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/A/Mat/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/A/Scalar/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/A/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Arr/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Piso/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Linalg/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Mat/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Maths/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/NN/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Batch/index.html delete mode 100644 
owl/Owl_regression_generic/Make/argument-1-Optimise/Checkpoint/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Clipping/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Gradient/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Learning_Rate/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Loss/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Momentum/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Params/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Regularisation/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Stopping/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/Utils/index.html delete mode 100644 owl/Owl_regression_generic/Make/argument-1-Optimise/index.html delete mode 100644 owl/Owl_regression_generic/Make/index.html delete mode 100644 owl/Owl_regression_generic_sig/.dummy delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/A/Linalg/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/A/Mat/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/A/Scalar/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/A/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Arr/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Aiso/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Piso/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Siao/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Sipo/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Siso/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Sito/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Linalg/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Mat/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Maths/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/NN/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Batch/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Checkpoint/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Clipping/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Gradient/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Learning_Rate/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Loss/index.html delete mode 100644 
owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Momentum/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Params/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Regularisation/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Stopping/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Utils/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/Optimise/index.html delete mode 100644 owl/Owl_regression_generic_sig/module-type-Sig/index.html delete mode 100644 owl/Owl_signal/.dummy delete mode 100644 owl/Owl_slicing/.dummy delete mode 100644 owl/Owl_slicing_basic/.dummy delete mode 100644 owl/Owl_slicing_fancy/.dummy delete mode 100644 owl/Owl_stats/.dummy delete mode 100644 owl/Owl_stats_dist/.dummy delete mode 100644 owl/Owl_stats_extend/.dummy delete mode 100644 owl/Owl_stats_prng/.dummy delete mode 100644 owl/Owl_stats_sampler/.dummy
diff --git a/fonts/KaTeX_AMS-Regular.woff2 b/fonts/KaTeX_AMS-Regular.woff2
deleted file mode 100644
index 0acaaff03d4bb7606de02a827aeee338e5a86910..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 28076
[base85-encoded WOFF2 payload omitted]

diff --git a/fonts/KaTeX_Caligraphic-Regular.woff2 b/fonts/KaTeX_Caligraphic-Regular.woff2
deleted file mode 100644
index 75344a1f98e37e2c631e178065854c3a81fb842f..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 6908
[base85-encoded WOFF2 payload omitted]

diff --git a/fonts/KaTeX_Fraktur-Bold.woff2 b/fonts/KaTeX_Fraktur-Bold.woff2
deleted file mode 100644
index 395f28beac23c7b0f7f3a1e714bd8dac253dd3bc..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 11348
[base85-encoded WOFF2 payload omitted]
z(p7IC8)x~5s#)!;6hBM!$6i|TH+G!ojgVxvwMV<>f6hrZ$wC)-SGcn~DA9)}RnL-z*RWekuPpCacmiMm2|#%vBmjodga!vtbS#zLV>nN#tH3xi zx24vQ-W{9R6oCZDJ)7svwFKw8dX5Ertxp852kD0_jPpq2rl)~lVfgktpU>?)kvu~$F8*Uz4iS< zmZ`8fx#t%{j6voQKRzWm;NI(ozQ zRm@Vm{LZwtM0X_?gs}l%&k&q{rMmnT*ngRw@8cYK!5!Jtxe+7lX0q?RCzcT7q#Hmo zE^0*r(`sIkAzpk%0rZDr=EenWnI~e@!ZWRw4&5YkdjWbzW}AA-v&Fz&U5v^$^*x^x z9D>=4oA;0hM2CEuwmS(iI~>@Mq%N%>10q;tU~LSNM4`9p(1S0Dl=;`tIgN5W8&hou zPvN%tJA4SbyjVH=tj?w8eUoobL6Wf2ZzU|Nb`mB zLywc}C%gcA(M%|66)j*4 zN>4qgxgPdPQyNp}{kMs#qQYEb2*2o#q5yL_>0DrUx>q|qT5aF))`^^cJ(QyK?sdw% z5#qW_n-;{pfuk=23r9`Do^BO2Xmd1xLk(tW+f+cT*Gc5gob;BZZcmO401gJ6ok>~S zr*F>a%7h)e=@U5^>@gWP)+L`j;MOKn(o>Y95bLohJz|{O74(Qp)Pk~v=`u&7Rz~5H zIz5}?SCMA>K}5qD1k9-?JM>3HY_A9J#M_dWNQlB++g$JUDn^)0fD`gdC3@zY8lw!H zYeg}GSS#YlJSonktjp~RV@BlFsl@t<%m=O8(LX z6y&&L=2R7_DC01Vw*UGr7d?L}=bU*|n1oeZ%4#CHW_$Z670 zH|hjzE@6De0$j6>L}KJGrL_Ininvg~+uMOTQ;Zpy(l=72h^OU+ixSHSHRP_aEKc0k3BsMrY=JELM3 zRP2g6(GzHr7J7vu%N1HXL>pFJOZMb#q&L_r*sC4(Ngn`HL^LmHSz7nGO9@8+^fgxb z67+4et`qyTi(?%L%ignifXMoovo zDa~GAqTo;v$#-c_OTXhqpS*Z7mM%vJxu*e59{jtNVHwmar>RU!IN<7TB|7>%97?^a zPWk!jl4)mKa){in)E>$tQnAj8*x?xiQxnP93oWKT>XmeIEMjL@w_Gj>2HItrq-7^` zI2p8ThlX|;F%u%Vm?8!wXL)++7IY}HCB(T+?FU93;}J;g zL>%SRb$ecz{m~HAL~75Iml)RFrUf)sm)>b+u64tc|j$3wC$s4>ay3Sn|4k~+;9d0-X>U=pxEkpL2fP$mdP4u7ID0UCu+Cj<=~ z66q5(6~H3VA`a#*8$nq)teS8S?Q7GE;LD$iX58Wf=pk7LZYlYjtp=j@Sz0 zfV;#2_A$UA27cUojVIN3R*e_ccufen#Q^sh_yc-_Pc`^bgKstXNeKAH0KXU}{v`am zU$;w9Ef2D*c>W;Xc{3f+)D#=*iypg8H3a3Nk)h0quG04cx||21OQ86Tlj+4iUT2R8 zo$pTh(whLV-@r*4&-Dj8j$14?y@E8_ z3u)|cq1PMWk8T6VmXP8gDDVF1q}kn3V1Yjad}-$aba;m zr!kQ#MD8v53!c31`Olgkj|rrt5*{ZhG+q)xY0~miDzUri^|hg16<-KumAQqHDgzZI z&o@UE;IJ&v!=)IqNZ;8R>njNyi9S+EdJ$n#kGVimbQ5usVQ)+dIf-8)m8b&1IiNI6 z2Q{Lw=K?#iFr`???bmT(yktyOo#J%U?x`~$TeA<&X0CZ_rP2C*+i7af+&`7Qb+*9Er%^4^6VIy^oewME%bP1f=|h20lY?Ih{0lS`T~|aAmI~ z&V9)5_)@OiQVRSE19I8nz(JqNkarcO{*R`3bk9W@C}nQyDgMRZf8O*3e&$1oVJFU7 z&a^~u8nGb!0Wz%sp6^!uU*lv^C2h5%rwi^CMud^h#YX}irAi8ZWdkbU>3b(mtOn(w zEN{Y4dTFF$s z3dn;iza^VJkQZ_D2MgqINxxJoD$$$d*)3uCP6S zCX~EjSPa*2W~pV2nzQC$Tz{w3{)SNG=a=`vu)2vT0PE#i2p6PUbrdfIw#!!4x%)`Z zU9qajna@(YNplbxj0a`{t5=l^ABncoKifv5k*JI;Y8lUAq+(Y1{EjoM$hC=LVMwb)(vzMiFM=CFeHy z`wM|=yDabV8I$TfVJy0NkcRfCl0U&(1OqJYDS~kt))t`GuY$cl%K!WGF zk;t0Nj0R-U#vkgnLTn?q3#heT{!rfJk|lbU9beJvgg7#&f05aj2k~z+vfOsOaf8if zg*yrB@^$yxr)O z85L|=+UF2qT;_|x`g?0AQ#KvNzM9uU&%u8=C2*t`dR^}wmT?(%Efjz1 zqV|ZE$5q{?)^)7Gyvf6p6P(;?eAAfV8Dv?TA0Ae{yvHzO5U-m*r)3*bCH_&$5J7Dxc7My#z6S!LA2gv4 zqP>$1zvG7+yA++Pz3bv)_)C=5* zo-F_$yDw>k$9T$pVvW4R6hIQvjejViY5b!#=_Z2z z?hjRQ;O8&x#hjavbVQEct^RLIweFBJ$UdWHuAb@;Shy7DMUo54~yHPEsJn9 zlv%M6ffvxf+w8JqF4NJjQ`+4lIZ3Ehvm8$R5#Em@93uzsa^*Ys?0eKCuBGw3yKPzx z@2IO)w~NWk@)o<1cO<$}vh$qOGblK4)(M&WmFb&pE2Y~z9T!*@wF53&AqXJWNnT=N z=mYs3MgPNueoxXV(bJ&#xk-n~zz9hGV}bVcBAQqg0F*!unDZK|6pO#r4NU1+22Te? zXh#n%itXb9jUTRbP8eMIif=bcIy30DwW`Igfr4WcAu>1$blj13hHXnXo2tXU?Ja}=wMVGv>xRYnAAlcF>Xem7r7=A1b*pnc3{jQ578{wO6BQ@ilAsRRzJ814ql6nNft9pRxGC z-HbYVX5(gxtz4Vp{0Ff8hb#AxN4}2LmKA}KyE$+QZJa=9&R$}ldVxchXdsuW%A%bb z4w;mcz3+MKko+#oN(%zd<>VL+deXgDspQlQjGQ%e^fyAkEo|{DdAFPwe@M;HVaBoW zojyoHabdHb-(_i$xu*_s;^*I0Y>d6BYc<*vyj9~ey%sUFHg}zkh3O?Nh`rIwGT8SZ z%wA$T66%{{>5Wu$@llJG47_j2m~NMVnzF+~1&2zrCR^sAj&>e(PYY`Ejar45c!n`| zy0>yTl=KA#2hr|
8iJi9&VuLl!D?|!}g_M>mOF8Np9hD)!Z1Vi=)NUxj~3huD& zyD|QQ7aI3(({H9Q#J{MlFEJmW^?D~ilCv^kGW^DwJtrX3%3lmPoqYMX$D{1PT>tY- z7&&?qIxCZ(mgn?cQ!37X+$}o(Af39P0>$~7j7f4p+>@Bi9aIj#bOl6-yFQA)naIV7 zp$RaqtO$JzbfPI|iDvvTz%%DZQ;3nI&&ZQvm|GrhS*E--9kMD12pHQ#GI%oy(ufJBQy}WA%+Fg zb{2gTOV|l#(Lp}SWgvO9bUmv48C28iNlXJO5*Z7kk&Cq+N*F$xAJ=R_wbAzj?a!dz z-1?v->KqkvLsOb+HZ+If1+3D6_rR|Lnpd@k|!GPWpb*j{dYXDsT;!&wG%w50@ z!$X2~O&VXQJ!?yxp6*gdc{-qUj^BC*;N4J)Ap{)5$EPb_8sZZA1HK0TH zdTmQk%mOe(F9JU#xBiL!jtTtjOY^dtP;*s{(b(A-qIV`0!Jw}0_{d;lEa@IU>z=9) z^uB3N7mQcy+b?ODY%5#hF(*89hX%5&Euu@f`sUi3jG9dwZF3E(gnRk33%cgDzear= zWK`GHf`>oYT;+2ubmPA&_iFX&PMZSM_+BiZ!Y-#A)*YdckLV7A8r~8g&K+l_Hwyv=a@c>BAIeuPD-ZnjuA4f}pR1E_a3AMFiQ8NasIL{hQ`(;ge= z4?i+&@?@`uvRXQbQl{QpgQ`9m*KK&^Mj1?5Lt$8Tb^d-$Qa5ws_j*=s;2BhiVj`2k zxMy1n+lpghTh;B*nzq*572+(t(wmG7Wl|D|yJHKZNnx?)75o0Ad8(V5Ok{}KKeZyd z9F1<*mPPOxt^jp`MBXAna0f`$#YP+b#`o2U_h?M!Vq&T4&J5gHzO^~h5?NZ#8>-Om zZ~cmMsXj26*%22f#S87gEGzj64&|vZ5^Hy9w>(q%E?uCpqGF;gnP4{b;+~MrqA6&d zoN0?S2EY7pq&ewXKJM-9Nl$wuE%f6WBQfzzTb|g^m1KRg?R^}!y@zTATAup?28~xP zr>jSbAWtz|Clz(Qr%8&3I0qROxN01)nYeLhc}ty!xV80)dQYQ&pm8?KtM#e|t9G|l zZ!0JDNMUaX7IE{WMeu~yU5Tf%7mZKVNsj*_0&_&dzdsiD=4yR3z zF7cDlC-JBYm0daq!H1#XmXX-|%XOdzD?)qcW#)^sJ5CXYS|P%wsFAYMscIlE*@=qw z4>eN#=+(b;3UPS1?#5tW72J+)Bx|IAB2@mhpOGrLNa0c1jP!xXoA)mE`5t}V6+g)B zbEh1QGclhnI%a2W417rsuhJ$mvN^_Hi8-P62X~url|=r2Fz4o;XK^lWIJk93Yc`rq zyBsaeLBSRYvNWFm;)`FV@2&)87VKZMk;88Ni7{*tq7;AJY7+TgsfC~7HhwzeG$;fX z`O6_sW)s>HR~cvqb6cG)Ef@C?Uz**!Qa+e>ZV*>_P;32h$bdqB$U5hRu*zOp4P}@L zMIM;~XxTo~8?6)dFpY3#g}JJr=)1*kmBC2i@lTov$d4CMw`GoIy-z_N1+h(AOJQp$ zOl@sAQ?;U2r4hlWnC&-qjMW&#pw>ogkFuZI;IOhJ6lfAcJ|Q(mHB##476GHV*o5#Z%vGnF>1Xa@muz^z5<@=U3j7k#$?7u*F?=&_}7ehUv$4lqTF1 zdrNPsJ>_*@sTc%q?ZfNU8*X#dbvZ@h2s5b{<5(4YQwb;xO#v;Kf zg00+UVhKk!Do1#9jLotBAOB%*>3|8QKucY+D2ujP?mHgn@RFKU(1v1yQh_)s#cfBG zLTp7syF{)sYb5;I?IIZ9>Gz!J_Vs=jx-p5I7b82hc!NPVPkqBOad;nzMv?qm8lBy0 zohsY-==OIY@}u3v{(Qfgwi@O9mkuL~{IBzNMt3~idRN3h^1b5c_N$v8`>ewR75pXq z&sy^&2W%&}Ce4g;R)U0kZY!R=>g;)#gU-cw^^#G&&&}A3rVjmNYpvf=VO`kKO@3#~ z)haw@4B-`|-BApsAm4f{=VKIe3s7n!-!H7$^3w93-x2|^~2?L z&&?!?^hR~84mnDoHSQm#q;Sr*UMKBq5=y+6j;UTBXfSZthyo(fa(cYc*%fH`e!p4f zz;dKb;lpJJ(s-=|;5HyHWOj4$Crb-$cV1acqn+w1TrIH&32DP(|DfC4t&H)_+E)z% z-H0{bvkaWop(xr=RV;^=uA6yplmq>s&{9uj8N5$gPH4RZE8XL(zGkGRkzTSLB*i%M zVH6zj_o@|v;{@Nu2+it@eXLJiRcNpkceyY>!)KO>?bbFi@r_7zLp*r$14u7Cpso%R$kdP;Bd3b(%3C-a7Z;+eQ8<| z`Rp`L4Cht<-+5F(BMUcgfeR(KUbQ=vNq^3+3WyKv6I!foG>L%TA_##3IZI5}$m)QL zk&zzgt80yI5=P)&#((_kF1<^Bk%N?*#6m^d{qOUOl4wob=z@Nfx`1*g{DyRMcyjQ) zZ5_#u_}=yNJ3NbI?YM_y>UtX2K(jpFwKDF+1G10TkB`jC6|vGyAp*~02zbbxq4~wpE<5^Jz_s_ML8s)Qhx552)Dx-Rw?zbI^K^Mab%;b{;-xo>fHeO!u+B z;pok~fzC(CW@PrfPRM$V3=D?{piBLv4t?qJ4>v$dA)N*8;$No;@Q)M^dTnzSw5RFH z+ja>vgY4+ujBUezJW#*EG%)ySUwYpjgjlF*@{s}Y33p5AhyN~^WKR zZ@c{EN)N2QmF$|IaCyt6n#t;6rJ|;``qm#K{&w}uDgmd|L-$!_5)qXYzaJfMGV19>%7Mct6yNwe?$#%M!6&CG1 z$xuk^7qfk3J_#G{;8<;fLt7_ZzXo_=G869N{15jruSy_=+deVnFOrw<`mz2XSn#5g zqcE_A=lQ%kvkr!Vu^)cD2ByQjsjr79<)$SyzrXlZd8~QeFMm##BZK9>pj6Ftk#P?r zHDD_5p9hbA+MbC?oB#b)rLtAa+8g-42f5h8k?VoOp5UFH_Lfg&jUO?yz0OXZ zCeC;a)NNvt0SD~HBdYmAk~^slIxDRFo0Cd5)1wIovwp#{BQ{~R$Hd5HFEdfaKOOj% zbacdT-3R=$`Bb6Q&19Q<`-42{sryGhds0L?eE-2Na3h5GR!JUg3{Gb5Xmv%I8DdET zwD<^2Xrivi+rc)jYyaIi-w1=M{B~$2R$cC5O_za<=OxC=FclQG8wGsyU?r5g3h5ex zw7s?l*nV|22sb^_<|vv#uZ95J_omLm zKN}{CexLXj(OdCm|BDK4qjAa-$$&m{`jAZsb0qB$1RMd_d=CC=ETb+3%n#mMy28ap zF#o{v9&bA|m`)eExmk2z$l_U92diU zQAN;VfV}fp?&7MH@dZCQ&uYDk>2O7d!}H@hgc)w^aTTw>32G=XD0NO>{@-TRljCI% zH_rk0@UZSq!y`&Hs}?{<&KMgzeU1P)SXWix3O5q#^^4XI6{J1LJP)$uVF~yyBI&Tt z_*@@=;PV$ZYfB5#p53^)O^w6;pFYpNAI0Rx(Zvw3Tt>|`JpGs7F?YgmkAS)d3vLEp 
zxBLom*$J-PClkCMJoJF3R&`u$rsLiVgc=JE^zy=Hj{4ghnQ$VMqjg zg34RyZ}QjgxDgZNhp0~E`|E&z=@IGaeC{B6Zl^k{cZpi@MY039K!-I;Z0{#kJP0v9 z=@RxjHK3n%^@|GuAa5~P__^eP zd;h*2uDsG}WY4EFbAVr4Hx@XV?BU#5#p&LhWrfaI}BfRk5*{-7Bfq{eL zh_Q(qBwxgNNaRpNN9%*fST1S&BiSX2Y6mi?jrKr5neJl>Wwz^#4;e!4fIG8=* zA?I#{xFEqN7f5P(?M4Uu@)~$qX|;_B5a;mA4M1Al?W?rzp?8T3>ug8SMGCEJ$xokF zv1SeXM32+J@{@gdPz9t;FT$Yb%Y%iWMq2RXDklkaAaJHP={UQNsM~@iq${WBHB?vf zezJkz^!A%&3;*u&Qd1gMUvl&2T9lVE<4@U zrg+QCe)H*w<^>Qg#90rx$mpp=}9AQ)yi8iZz>%K0nPCN_|0 z-PY&G@}KB@Vy0(Rst}wq@G!&{GG**Pi>}S^qglm({`;2~%S=w+ym@DdDkI7~h0?|< zLHqB1rw-F`zxrn>WEe?Z&%*LeNuYMccZf%wZ`3W36uk%B&qxPQ_|lh4@}8cnvSD+c zm1i)md1fs$-#(|Qi}oq5?8>@2adLjykwyISo#K^yTT(%_SygB>d%)K2oXs;`*=Jw` z7YeP3=TEhcPaEtJhOhaJc;ewMcV5n4fr3qcM0R`Ty>C)2pNKT2L#;xktUjE{XHhE2 zc;C@TMDZcZvLNT*bDP!~%UHcWi?IpfY7}dU==X>`+?<=^9|&{JjFFP4e3^xtKm~>G zP;GM;mZUq1(Ni09-}ixoyylqP&z)GA^XZ1UMZb4l65hJ_34K5xIe+Hk-(8^3s$LlS zitP5t{meg-qR|oiTJ;B+m!H3f`Obqu=9C|@H+g%k6|>>xbu#4B_SXU{pSpZI`rt8k zd(SOot!YkLt%y*@!R@+jh@@G#A$+$=I(?-2U?5$LJd22Biy|-ekN)~_{)M9gY zj|&7WebrFeFrD)D)lZpRLf?*66bv=mZq_R=SgKbm6-FB-p_IJ+=5v+Z!b6A0z&J%7 z4;{tax0^oBm54k!acoUXhoXqyqY|`IhZI9YP}ib)n=%yHuQ>2{>{;LjcskGkvZZoQ z&qtbMh{@^QI#grgRy^6hSqUY?nr~B&Y)I5Inm)1?pP(e$jcDRL+MddWc%nX+Rgedm zO7kg)OvMaoftxyAEu)r62|B}-^2!XHF6NXK=RH;)WJ=j3v^`gvOYbD1u#DT}D~C-@ zMW;8VDsWOQ-qC8TR8Vu>IxmU9%gyU1}Fx zn-&9^Ci(eR%@x_QWczx#9-SI7Lw0f{O3hjz`JO3ZgdWkxje9`{^`IWRNo1&VJGPCa zPp=mPqV+h_J&tGGYZAI?*_AzgM8D29t=LXiht$xtF!rboMraR(){nk4s>7;q_;2P5 z@ryKld@cOa{W94v0{Kb(`0tYn18FVI@UV3H*a5$=%-WN`%3tU!`e_ILZb_&5{RgKv z5rpFGyl(QT?8s!SK0&Rq5i1vEY7V}@N)#dsOAwKg=Ao!a_CHa9*7{l}!sI@kdKU6j zfV$pi?~hPA#FTmuyzex%=gHv*t-3z`6f#hq17-Rcp~cL16!*K3_wb$$#b76(j6E5w zKZ2j$N9{Ri{Rv#BUIq`LePvKHaI617HGEg%0e7Rwu;Qgllf~CLIqBtUi1u6- zXVO@-7?S6`0YW-r3(qPpY+BCA0~3QNKSf4~YVP(~8O3PKWi-cPj|uJ)!@;-)HiJ~` zGHe8kCVjfS+@_E3HLM$Mn-(LM81ntqqA3{=E!SL*N5L8-Kf82 z9KvQCv6^96G+k2o#*g<0jVv*M`Q2n6_!2%go^p1c)178_^fj>R|9Bi!B#X`A z^7sOw2pj?u3K|9$4jus!2^j?q0}C4m7Z0C6IUx}-3D>?|atcZnDygVxXjRdvre|Pe zVP#|I;N;@w;Z@7WFCZu+EFvlB}U8T zMzB?gr+a~R;~(48<%7kiMqgf>1?x%Y;Y$Yd5XK)8mpUz%x?)bF$$R&@`ES|j$<79b Wg04?dcHtp;A))8;;A&?XTgw>?+~w4ijH#pv#Ou(S+JuTfhf>O^sW6;Fx#b2@rkj)P z-d}ewUs&r@x;?8bxf$`O*x4$w9`>Zp>GY6YYWOpppm0Tacj>9iMat?P7M zeq?N4er^2~ix2%ro%X&YLuBF*x1rn;Z`+whNU)8Qx?Rs|;h>c+(BThy{Z&%F@&w1yt=VV>r?H~L?6BF>5 zfJa7O#Hzbje%F80X)XY&@YP=+`+y2QQsM%pInqCr8y^i@=8oQ`C%0^%Ub%S)hpm7f zP~X1guWio2E3?dm>`7kJJ;Y@9;7<=tN!J0-=kELQvE7zHBF{AQTJXu*2qOWdrif8+I3s)9ni^D z#kH^$TnqQgMRvcEbW|EmSS4OD{6f0G(M!k6sOlQ}% z*;c`2gC|~H&<{<>%StNx;=zm+u-fT(FzM8F}SU;Atw&ec}8x^On? 
zguk%zd*;cMfG;)?qQg7bdxubOzFO?ABY$##DF5;Z=IOnjbPqK?S6Tx10xv3_0h&v_Si)5*QLc|Uv&m&^8kq`Vx6HZ+D9Z7L(kv*uQ>6qJ#+Jatmu{(X}0MRx#^&#bGc zv?`WG3*qv{5>wAp~q zyeu*@;mYg1NfecNl!;l@@q>)gcrr~@76o^UA>2`t_o{DomkZSVqAFeMxO7_*+TAVXR&@vmp0(C^bf-eNCzWa`UGR~+ zK|`J@b=TCTJCPppUWG?T`K=1ohPbYe!;jXBud>X;YPhbWAbVyHKOEyqmr!uV5FUQ% zli~9sP&M#E40D+q@0`Fb04*_r`_8-*{Sa>QzdC!$9cdnJ?J$)PVj9ygIf297MMbSw z#%I-d@_-%)xC|n2UXalFIg1c@Z1baqZ-x@sW?&hp7;?AiN&~!LC@{v*fWo5&RUb}H z@GsWqBt#64Xe~q{FrgGPlwpplSfCnKP#vqNfi={`I%?rc=L8&tK&a*Lqh*RfZ%am9 z)|`%MjKklToiz@_KMz|Q0j9#*ET~=|NmXJcbf67gQ0Rd|9~1_lFa(7WDCD591qx%( z%p4TQEn4^-(n2LoSjn>_71^A*xdK?k%NXq^E@^p;F+KmU7RkDHP`SEaWfW{N|hXm~Zm zF@PA!5*Pl`N7wgex`x5E-}&5#;{5RM3QlHHr1U1JW@IGcwy+dlUfD~bEp5f!+)@=& zZs$o--jjQfyyKuUX_+f|EuW^H2)c}+FwIuA7Ecv1SMJJ6S}D_vC-Y9ap^B8;`D`U5 zp|c{XU-><;wdXTRzhT5uv5;X<#Yu=L(aBSLMZiSDY;5=ykTfOOF#-4J3!_p zp=>)B&`oNgTh%Y{*+vwzR@07M1jQp3RM*zp|AA@oWkt?ML&hxf*Sx^fMz1mVom1BE zR!_T^V2Y?zS^b2zqN-vkmdFc1dd}~?+Q4HyuA^o?O=ZR zg6c{!LNSNd3B)WlGyU^Zm}VS6;?&^5xfJmRi2nv&b_S+e)sg*;*yS>@j1;rm$Go95 zYi4Odi=K7jc#RtRTvNAMnzzKJ5=X!bGfI9@V3`|3-KPD~Re70v1rMslaX_ipz|jwS zvnRv}Q$#y?uTm!7BCg|jQ^|XW0=P-=9&-`W6aXEE8G`T(o1{XP0$}6hJGdl38Nm$^ z@`eHUp|C(06Am#1M;L-*3}Fc{tZ@oY{wvoRuP2O5g^KD4xy@H z8>X-eQ*++}3LRn?hmH}TQv~QdhcKmZ2{X8c8Qj7Q?$HW8B0zlv^a+g|USSUJFo#c= z!#7%?Uj*nML1r!g@NfLyK7Od{g=q1$Wv#S`S%cny?^NpV?0_(6K;0a*avL;l?BXZ5 z7eMqTs4rmxTL}tf@rF5ClQn>KajYU;CBm&krE+9l9zYjvRa;J8Usn=eH&7w^`5lF{ z4D+bNG=tHZZm(I_Oxd1Z8ES1!ciK+cne<30gJvNIl{bUEXRpoYm6`I$+|c5F&-&u_ zZnE-yz#6kT^>%#tG;yU)sU|(m-gITGy*J%qwF0 zvF`ClrurB3;(S+ce7#FC#Mdq^zw*scomvf62>j&$E<<@L()7Z|25_iJl%Xv-68Lx0 z&bYmAYH6MDOcC!h?c$hxMs3&GK`vy(AzfX?xuli;o@#wfbv7-KIRXX~h)#XEm5mh& z80$lPtOqAOZ$BE~Q)C#-z~LrG;ww}AKFh~g|H@Rn#!g|Ao45?Ikr~5B1`k%+kCY#m z&UvctPz&wbSN6CI-i_0)+_~YvcbmTRl~Z5+PV0BSdRBFqI11Bn%2XW@zx-b7_svk< z=zwsi{3nOO@1NSS`SR+npjpdej`dRFS&vrQz}42p@HIPijo z7!d*ZP8g-vup0vHdyVha83yh8iQ}uC`=k{4fB<+2i)?e|*0522Vgb~N{vvYxzIH*$$}#0@zd`9@sYKU>UAa$WoyJekvUWOG0hGRWEUQU01{_ElaicFLJ;OvYmA=bwpdi=}e3vRvoQs z2ZBc$;gA4j@q8XurOT`{j(OTniTVZ3&21xpgtWN0;Vz?a%rY*`KSVCCBkKLF%L}_UKE6Zs ze6B2-IycOjkDdD*9SUBke0qJHTAwsWaTy|jj!0ud+9h02CQa(qeCyHSJJk3s^A?$? 
z)Hb40OeHS(kFo&m%hW=O01m`W>U`l(mEOI&MVGS`yFNRr$Gk?9%fcV@$?Tj*KI_}4(2 zVhaNb85A?tV7q*nH?wjwG%{dmDih>>SdGNBe_k2 z>&pU>UF}W?e~FW?TWAzX%sF2@g}SwcRH~fein4lnagS=Z(G%MhZGzFJJqC)FDz1n< zslQHgX6^%bjlfsvyq=s-Qc>vHQJ^uxp;!p!Mxi z0eKE7Qa@NsSZ40#fn=}vw@v=*B=2|%I-|309^PCB0yJw<>byqjK0Lfxx%hAk3r8I$Udb>}Z zM`Y7?{p}1daY5iwJZt|K!X>oP8{{-q9ZqNCj28_sZwAU{kt_+2=gHd%-%wHb#y98b zgyG29Z@eKT|5)`haQLBitp|tm;>~Oy)O<1Rl!0LuW;}>%KJq^1_OPpBDH=v?-q-K; z&nm%avn1tIe}asG_0dtB7L6tRu=zK1>m&nv229t)4Osv;@U%&f`n;4A;@u~p z0>idTi)zy0wm?(nRX^4TpR)D5>J})5-I4RwZ99w>wk7zJ+@*Nkk{kiHEzgG}5{w)S zRB|aidoc)oA3f2oJsFLDh%xa-MxL_bdps zWg+OApY~XRQ9dXi-?)p+%lhddlq08|R?wb-YPXS!0p#IA!STb;b15h?#~x&}*hPI^ zufliGG(w;^ftI_qcw`FQ?j=5b(f$BqgZ=pZ>9|X>G}nsX zuq8we`gm$TPtubp;aNsPL6uvf^lTJ|v^2*lg=afBPrCb&ed56nZ!TWXmlp?RhZ_5- zFD4v<+>y{h-rC>uG8mUK$T0O|*%g%ps&%67w1cd`LY%Zx9FPM_UU(YfiL%m?5iXZj z%9RuoXJE$RRrt6)$atahoxzI;)htC_?p4G$?xuZE1Js_G9QryU8%d{-89fL_r$UK5${CZ4`TM1(mLOc|%n{j3ObqnD_sYQnRJxfq}C+=~4I@TP6qv(y4P@=7uu8&Gpm2486pIb8DV5*L#=F zgdsRIgy+EKrw}$;2Kg%g@(ku>oZ_SZ9dr;0^p0VKWh&cK$k%-ifqJ(XDsQC0uCCyQAyCmZoA>&ARm>Abd|!(TeFE%I;bW z7CvNKXFJ6s!WC-61>dmz2(_e4NxW&y&ZQ(Frp#e@}HIs+rhh{dFTlS%+v5WH*v>Qih zZXn+U2Q=xu`N|3b)w&-HvIMkSxXiS8&>Gix%&;?6K$$s`xS5pU*um&80w-im_8&hn zeF8ZECFvS6lL65{7)<0#>~Sp&DP6;oYUDw2KT3F>y2B}yiEwU=G3&Vu?FB1}DaR<$ z0s`el)SdnrO_V=j%gm{HNp63u(o-DhYn_Q(Y~h4ye~ByE;g(-l*zW1V2Bu^0f<@KA z9K)=dA7%G%9REnWvU3G1x_SNbE!L@ox!GMe*X3{Ca&@;Z`zVuhJ zB2P#tVm%4w9%4EW;bp`)xpmD_YO~_qaa#6-a7#I~hPaR&Hd?^gE7{M=P8wk$%p6uk z9Q`M6g--(5A&Hg_u6Hi}YxvIASQ76m2t z(VR{wVHAvar$P0vjaYl+{nl>Vb6Xo>m_G0e*EXhQ1HZX$+uD#25H&;EO|1#9>K$e5 zndj}pVgm$4`WFfQ^`Xq)7V|c4U)1DuDjr!xx?r-+V~bU`BPtN05BJaG@s;r$e%7Oq z->J73>YBm%Us_>DV>Qs!ZXn2xk5Gv!3)SWgU)v>I(`}Q!V5OZJSVUySBG&L;U!b zs1CX?Bg_3(shRm^mzpufu$G?^+2zU-kCe|4NFG?_Pvn}1{gxu9qe%AA-M2jhG{Iri zhO&!?b5G5_@I}PVgEx*hJW_Z4wX-$^Z>B4R2@3vB!-ifMH~rSO(zdAT_M_ftW{T&v zo^Z@N-r-4ix>Yull6Y176;)xg|NZEXT->>}-*;qFrTOH^{z8+xHSKB!S?r=0Jravw zdu6_BbrsSFdc1Qo14ZiM9AcuE<(XG}Z@~fvc8jW#s};p!X=RnQrllj3V>^WpGc6oz z8@=Bzf9`mqEL30ZR9Cmg>&xbiUcM#!e&2}y8MHI)6;zAn>4oq>0HmPI4~uhqFaUDODpXLE8Sf5ZN><&1=AZ9!?FJ~->g|ie5ybHRXS@e-DYbk#Xp0#N>2_Vvv{=To%C7S*U)?ce<6=t23Ryi@j;h1 z7H8EYi;l~;MIw>#g?eQ?Wpndq?e=^w^u zH}_SyN||}r)bz@#r)}Wo{(C33?09cZ7hm6E0LpShx69jAI>%vTH&jij8pa1@IUsuf zdAF@f_1Cs97JQ1UH*UnJ`u%N|+#VyJsyaLx{J3Ygh-aO7N5TJi?5r@4yorOOIfIqT z$12PP8p3K(->FQdPt}03{c<_(fBBb}uUm#%%aBPdpmbzS$x9D4b9?%qaY zex$G{|5F2&T;LB`9*Wp%t@3+jY+`lT@yo81aj=FeL5YDQxrSSieRbk3*vEI zDZV#%_^Ja>&$+1$2FDtB5nG`J^R9w^@ufVv$^43tQX@+rUYOxm;Hx=m#Gke{hs&7` zSV~J5E)UZ=GtUd9*sb9e7Os26(OP6%cb2oF`xI*Ml}DtSyt;Y1^b^5yjyNy!9Q0Yw z+Y7F~ji+zo-<@UiY7c9(#Y*@3s_23N0?dl!S5ii}xM#@lUZ&xarl87F$l6!x*e0Pj zWw29~4OO;xz$1mXtMwWOH(c9c*Ktt?pI~N95`X>q|CL=+k(*@7Aaz4`;X>fFiRk(D z;S2azulblBeF*QA?JM6tZ`&7)tlr<>b+z&7D@Ir?u&9mBSh2YVw?<*rwwaR$tu`N<6%S>2%GjM_H#oOLeZKH2!FJBHEYm6$kVc@2Z)uR-!j~9le<~Lv#GsnB zNOq9=GBEJ@i^tGLfBjsKU9T22>=kiT#?Q#r@er5qB8c(>I%S;NWDW|tZhPtAu78(7 zRBqN?r4=W-BNnFDifFT6#Jo^H^Qgn3Dv``zS!0#yv#o6WNbp+7!Qpy_ef?1?HsNY@3hUbVmroKybpi#Tof2c% zZ_N;#Ek5F;bU+!Ts0x!sOk>L)pnEho;V@r8o*7|B*+?U4 zd8T|24y`0--Vx<-ekYCV{deYOnr$5A!}Fgakz$G>>C&mTjVzoOxFgE-$UPmN53g%WUr8L<6lZllHU2B}rWo$N$u<+$`6|c(#ge{R@)Z;+u3^aw^BMZ(3a` zp*qg`*{pMen8sX%8GLPI?!qH{&4F?m=vya#7~8O3^yBcq&?Ikwnkya(~YB ziq{u0CCSjGp#3fMhVkUXQ*3X67Wo!FfOSF`+?%uwo#5CvwXTEAP;HT(GgNk&!DC~_ zZHL@ZeuqtnhwR+BV|WkpC1h`#NfmuwN|+|SSCBBsS$h~tQRLOZD2@k~RvSCZwf2Vd zWsVBP=7*5#=rU)5kd$J6{YK*X&&CgLHr#R9Lh$yX2X|SjF|6l;mxfnj`A6a4GUAOzkO? 
z5;jY7*ZsV6(5&27Dt8N?g&u%a+&YpifAmd3h1CEvd9{iNxwZgO9bN9s*m+-EWurW1 z)&n6$D;iLB!4mEk&mv8;TeweHYxH)`W@}dvwI$`8yR0OrcAop&YO1BdY^5bXAeNhiI)(eY$x$yQ-+6pFE$TXTc6w zng?pKIt;v=sSS{#c;O`F^+z8gB@V?!g(g^ZP7?PTf1C7xSB&RgZfY;f{+gRT7mc#3 zYz?G^7}75nnHWEuVt8FKbh_R07o%To`^!8Y9PUX?7@v|UUtp4z&-u$s_&9^~9ih&n zmxn3UnrOAxsoM(DUmwO1hH=a;V^d}n9D1ta8O<~qyO`-uGr&h*|M8&n{ZGIBsta_b z&W-PVom)CgO`YgC!?p^C2$|Lze91^%_q?85mmB!YlwgY}UU9BmVY1+}P%GmjaUA5$ zxvUlk9*RpRJlYCLfi`c9TE8^Xm=p2r#=8#BfNNkRpC0@$P{m=wf2!uW3ZHjz|3J0Y zoE~Kt+u@$#C?V>!t1hx&e&z9L#*~)URFXEX4cRw)}S> zLV;TrOxS{XK{v}&68Beyic_!s2!XOu@7BZK?W8Tv)>X?`Nz^A>0B`bpH;Ua)t;#rJ zZJL^me4ECgr8;%>PF?>MkhoM7b~+QVN^uLJ$*Qg|IO8BX<*}a0EFem!+Bpkr`W?0r zTup04Wd60m+7t~2ZK6SG_F8jqXgR83h`5`Rta9dKu0Q_wWnLFNWfQZ}D`9fs3-GHZ zU&xc6!aRR&3!?EN!#h|F#|#K-Oh8Xhw{M%94mLZVVyB9t9U+k2_YSjJvIJ@CQ{`N1 zrKp;kD-9;EBwh~-I49TNU9%DqdwkCZ)bSi^sLuOa;#~u^2i6iD*;kwZ5u%sA>Zotr z@;hP|AHHDEmwj&>Le=%W$6b-?tgq!xJC>IH1A1WQl|D$)KK!sr>~vI)g`do2x5iQp zA5C2WpaYjbIQ(0Vvs5X#eS~SrN5RkjYboS3E>1!&U%Z+X+PJ7w??rBS>{1zaLX83;V|!etuGWVPWUE7UNr`R3XM5ygG7dJH&eF3j*mu%=OmVuhyAe z+XGo|zKYJi5(wM#f^FsPu*k0CPtU0(L&5WNHgu71BVz&BLdb^1niUA>;LYd9d-EgX z5g$Ch?MOuq>^*AxP}~zgd#<$*mL3+r-I9d<(hVNTt}MEFfIQ*PO*?%C2R@)5pw*B0 z@!;k{p@F-->?37Y)yVX3@Ql+Kci)dXD|$5 z%K5S@8}2GzI%h@aI>i8R;x}!NY2y+lBJ}H@PocJ53g@0gzT9goXtzY_R5|nL4Sl6W z<&$>pFs19;Yggu*tBN`4P%h{jT+(kc@GV|O+hDd?6W zcOntn(Nd+JCa`B3YW!`8|MVroAIUiI`r?_Bt=U-ncsDc!T>nuRzep#W8pAIDpQL_w zp8=12+=6ReiJVa22kPyGd2<_H``A~|lop;j(DJgS+a098S|=p2y~ zQ>Dyb@I~i`sDr)hT0j1;wTV%vJYgooo@%aCTKLGiu%p7Q&qA*=>+!%^iSe$-^Vf-~ z8cnGNHxK5WAqW*tG`R!;3WzBAEJ%X51#)Fp$fYj(O7`r3sOLwz6Xj5=8Mz98{p}R; zEO37Z%|QB2xV8Nc(;FIvOAbfh?_-xUHMgE?jOM#U-=g6{=o@iMp`*+SCjN)GLEIQs z1o)R|U57hoJ*KX9Gq-)i(CF@}um2|s$KXjH+KS11MWb8wbOt_8`-aE!_i>CB6gEu` zOb&-ZtuUT;xm@}dgO2udRou;rUk$nvtNs~G9cA%cdKrh) zZ7D?!Q-6Jk#+TsehP0F+v0wRgCNCmlfbP|gC=!L2LVR6u6@x9sW$DvGdR}k{JoT8w z6F@iQ=E~zAxoo#en~~Y!fcwdhawMzkMae9Qd%3m=T_^@4vP~8>5tY>Wml=S&&tthY zg^T!(f)iHXOB!`g-!diVUVm35<-a}g0#)uIS(mL~#OumWaEcnS4JRUIAiR_02)`uE zE@PX@+lO9iTSjhrik`P^by4kiL1a>s@99u;yA3E2@ctNXf;gvLs&F?o6ruQ@gjDm* zklW_E-~43u5{|sI;)6VEtJNbvBQ~wm4_*S!85gWZXj@$lS^a^jWWGuJl}<_@ys|jl zfaAhRgCuXW?FE8`V3!ZFDrRI^E2!iT!ad#$a#0Eu;G`Q$>!yL@^>;61;842=T-8t$ zLyR0PhiabyGk?S6F9R)&t(P8IXmq-Gqv&*N&jn1%pgI7P@IW7?<5ICL=@%F!SkN`yGJQ(k6cq)$jj z++{)ygb@ZOl!9laAV*ZB@6qd6w;I9gm8j@GO3caBNK3$Xyw3gr+F+AOy1_QsT5_@3M!0J4uE1v zf(M~qA%K}_T}~F@xNI6p;Zuba{j87xA)rW2Rq*LZ+nS$4kD}ut1`$XI=?WA)LI+I8 zAOHe*dR#9JO#DA3mi*I+usZ+%3l{n}jf2kK_}?SHZUTS;1<0wY+Uu4Y*`B7pN4iUE zx6vcpRWoD@J?a-^S;{q_Qr}zp1XyK-xmVwK=s5MKxrkIc{`_qOumy8Cfgwm(zzhbE zpIGx5uuY2(UcLQvQrCOpVUVs`6b}`$YlnC>!wzKD`k)__3a1qPbL~Z{-2@|Bw%g(k z#m@E;U_pBkYbeJZ>1g$Sw?7u_O2LM1H1wX$pTJ(dtAYUWtl-KYtZd?l z11}G_g$8_KAcWQTvjQ_r>6(QsB0$Mq*B_i{=B|_e5%YB4h#$KU0IC4MFn~ZV7Gv^< zStj}+nNxskz)Xew-@M50dQ%oJZf4vEO{v9705^ZpK|(QhDOfWAwV2jHmNSV1Y74F3 zV?jnVPB;WO6@u;1sZa#ZoC@Rd)~Rr+WSxp&P1%~@;HmiP%;Kj?U>xo#_3AUGUxzmB z1_>xMQwmEj@3D)huT+{@`>OG=wy~(>6)Ff%ZFS?-y*ap&_&pFs{K)&8$jg>rn4kl{k%S zyq}a%ucf15v{=%?owhpZ5L&(3_t+d7_^>D~WVfY(G6((b#|8BA0aEEO@5&H_^^5`mX+714k}%K4h!Vc` zfC?Is&`L*k=#L4+Xt&_a1i}DhV2EdsZ~->nqQY|Xp$zJiC{@3D_cc`n7@_e3A*M z`bKAadTovWQ{#074gn1V3kQ#Yh=h!SDhmxA0}~5dHV!Tx0TBr)894D;C+WT!U<21_4^m--^~Ip zJdkUT83C&E{3jCy!EA@cYG2ga2VMaJn-MyU{k{hbV0-A(7TnmIU00bZfh-L?l84Q6k8(p9$#9e#j zay#&*{AZqb!i{nEIFLFLjG|^yR#~$D|34+^7{d+y08OLnKP1aVk&cmYh+LTSpn$_E z1CBKUhXx(;t@HE&$&|WJnIW@OqKVmh88hLPv?y>}N=NO3GRf&N@`?quW?!+oJVOxQ z5M(%s#o|K1?dWmCdD*X(En&UgZ~Gt_YA^Nvx~wn%5b!1mvj0#^sPUTb)=5uBiepa{ zM@C#m?v<3t_GwDBor8v<ttQO?g=!#O_g2#tM`J3K$P-v8VFqW@nKOE#jm5&c;t 
zImW0Q^h9U_r9`no!bnamPVbB}?({-V&$u$`JiRtAy($LX{5}c6LP8)wpoZ3I{h3ux zT*ogqj5^JCz8()fNA1td9=LZUR>1qx6K~4{4g*f)xHy7d7KIz`4CS<(k&^?-P6~p; zlMpQ{@NfY^v-_L5{sv?#w238S;()WZ13){{^i}>{`-Erb{ic@G-7jf^E_BhPN zlt>z-oxOEA@1B=oe8=`38v+X$EK-^>3^bbq1mABhf0CI666W;1vn64x!~&3Y1$L_c zr&sw`Rns0{A1{arfc3$CudWOh6_5g7nN+vW4$y%^dVK; z58e;kU~zxK!!d83GNKVCq4k?7Q#mQ{M5H@a4(ONd3j}uH_I4S&O_<-{_3=;qyDfxb zNX<}8iS6^`a5!cZg$->0)^B^QbZQ{-W_Y1Y@R%{!e1dC_TUHRUe0RL&twDV>P$sB?z9jlTDbh2`HHuPsM zIl#>vCZ*h!k`m@NM_`M(vcg6cTxEr+GZn&Ay8fT4Wjo|*Q>VRZ-K|}PTy%Ht+}Ta% z(&gX}Ko0(Z0RvJu7*H|@^3D>tyH5n9XA0B?Q7JgZq@^}e7qw2gyXU5fv1?s)Zd+f| zxAyy%&hF&q(-*pmml(6x%3SJRXhoPbWuP0yL&#bb%3+jb{?}@CA-N^-Oh~3J|MfZZ zIT!D!&Y6#3GPj6`_h8n@5L)7~tW`CE#ORGh+jxKw0qeEbZ~xy<{dX~=-FEVFj5Uaa z5^}-I=-Qv!aqPFg(tT0)7q#mdg@GVFFc@V498dEk5&%EAMgZ33LqD_x15-(HdOz^v z)OUZxpK)dtiO@s#kCJsLmeY}y0UK`=0HCkj4FDPLQ$hs98x3T2#0vV=_w+-YMg^8* zkG&2$?3gps|NEZLJ@zNmMsCLCrM#9O=MO7gL4`F{Gc{LnDRD@K$Y?U2Of2)4$z^Jp zPIgz$mUHEpTqO6C$H=$J_seJHZ_3ZhKak&600luoQcx6h1*Y&(NE9-KQlV0WDv}j? zg-Kyi*cF)yr=m(xt7uj%RSYRsDK;p!D)uQ3D&A9ER{W@Vp`; zs22u43;a9q-;mUhqhHA8=K(TM<%2jX?&1%)uKpf;1-=JAAdLU5eT0KDSVom$GLg(r zCi{oc!-wSO4YN=n{~(m`|UGnlTjP3x7sQ_y0q~qQzB2UbPf^1QIN-r zksPtboYe68Oy6G05|yysZgD911CY`YO}qFY0Iyho_CI&QMn$6GmUk9@j}32LnfmCB zO~hgU-M2fV{v<;KCAbwP2E0E8_T_tO*FOU{*4TZFOXOP*M4nCzG33QUcAWPF_Xt)p z9srb8)QS*H%d#SZc<~h#px;yGFP+ks9ucD!G~a4zHCIIZx&o;Yu}wYk#cDu}2BR_? z-tdC%!CpAMyn(YzbnucV)5?BuHSF3T6WSQTT}m)uKw3|v)jHdu2Y+;UyZm$LCdSEP zLTxwr${Dx$uYk(syIJ?@#oa#9k~@p@JCB9}&D682ts2ecCpb|eKW1Tx{C7PGv15d( z7|9~PMYdElD;q)Nq+AeO4+IzHwhy_}`8XLLr(;JJ((qW7=e4u?WQ5@?^u!(mB}J{G z3QR!ksUJg~UBJ+k2KvtXa$X*V*4GqC#6|BZTEJDuZ97~jqS7^~cKyQ-PUQ6KLy9sP zZ4u|~5IdLV7R0=PC)WYdQDSY!^(yM1$0=VG!+nxFOZ0OOTxaQ-tDvrAnmrf zIL7k)*_iZ6G4SHJxD~-*&dF+`Yd&d+wKRzwYQTpfQb_DExR8sAzNaOq1%qDnw{p^3 zii&5Q@%8V7t?RP+MGh=GAvQR3yFE4|%5E3D6K`vtdN>Md?DAzzv!a8J12pHmwT)ew zngF%jb>YN{xb$E>a$6i@d6A1sml64?I%O*Q+ZTBMH55D;+g-AutSWkVza8FxS>Zb0 z9rK>vM*)K0xY#m;Rq2Mg6C>Bc_i|1T;i73Vrns2nFF$gu2S z+Q>O;sWOBCfm%uU4lMv&urVPCj(%ZE`@ggRrwX+DHBgvhLfe$gh>9W}Ma!pwD;Gpn zy(j+ugthn+CI5-FJEnUj3J9WN=<<+6c|m<$_B*~joC4~B+ z)XMv>K~NLE-co40anN#7j(yyFc(OhA`FTrmij(lMO;`_h0TdMZh72ikC<-W=P_&?E zL(zev3q=o#J~TT5nq+0;{K%-F6<<4S!wEM)9OY|uf((z2caP7@6u=CTj3lkgo%+h9 z=sYJgb0;eH*lxBR#A&yvX<-uXn}^Q4Z&ogEZ2(VDPj2EAY_n@GAPgMKiXtHYL75r9 zkoB^W0zW@5TR?*uc15t_KcI@+sIN2?ce+tWMWeKL7X^uQ)Qu)6ZYk4m^WXcn#tC7^ z-Q)I}4m$}hTwK%LdmxbC;p}4nur8`l@%Obj+RY;an!E!qWWR>mRFzs(P^2C{y+7$a zxeR+)&!=vS^ZBajy=;dNxlmRzjl3mqsu=I-%txesFk>%NnR+%nH#x+3FE>!#PlC!> zS*B^-b((PcXyp-Fc%2aw6tG3a7zkhWWCwQ8#CZ$yZY!I0FM5?7Zgcs71{*?M&7h=N zPoF&WNMw8putA7m`AEzhF-fM;^W6+J8u6~Ui@;REt*++5rnk>q%m6N%=~krB(!G3q z48#`>Fh02%=x!mr>I6~)1X6qZkfC#P3uvq^ljM&w_g%c?+;!V8^;WF>aj(F=*jRQA z?nh3w$*;#+nmNPI*A)rjtomiL^J}hQs-S2DPl44o{P}5CNAb__g7yc@bz6mjcG0_QNx#!%+7;*=3T)6z_h_tjt4f75Qdx{$+6GJ3fM$K zm~#0u>=7p<%mr*w0bwcVl?qRM?SbBa2iOXFXAir4ul5+05Cl$(avX3;0h1*Jg)=H9 z>~PK7*E)CrLog;NfF;m2cmPMZ=b{iFJmb}d$hZ4^6m-zz9z!us5+()YDgrn$KqWn5 z1}WuH#>;XDj_42`sUtw00O@(2lrAh?S-P?G8iINQsCPj60h7@uM_(L$bMzB}`UR*z z!14c>@M}G*?W49f;KuQD5bwO1j%W<2YU*jL0SN5IW+2r;dxqcx#U3Ayc>?q| z0H*{15Wc|-JoOcrH-@klaUiWI#%ggr4+0OBBsMEe8oL<4i+EAGUJA_P=!9Yv4ixCg z021$4BCNPv8rymWPw+g@(vliML;%H&2T7E&0*?n#6KhzNa<;Kz7D`MMs29H|4VRCF zp#7_h!oF3nKX}YHzc^l8 zy=h!I(YlZf$%(yEjC^}yuYMoNSsoQQ>?uz6AQnOE?{dPYqg zl+AwST-9k??`c#W%`aUzb2V9>?y?T^E!D!#Tna`;FQQx6Qvn9tCzMbm#y|dC{CFOx zfRo8nKTe#(c7Qx7X!`FI!mpJWMvw}UH~mf|o`9GLt;G=CaD?aar87Av+(%%KgNyKS z@_p2=ZAE~V*G=EH$+FB#T&6j*hVlQV0yF)jV3H|dr-lj6WN5u9!iMF=sj&y~mUnSN z1K`zgB1^zYG#BrN?9m-D!5Ymva2&}1(Q5<^>KqMS$h0W|I={$*XeM#D85Gg06~8z@ 
zGHlDwfF~d-NZogk_1oBOiHxvS=s|?gf8cyv5MEqHiUvTSj$P4)Y%hDYdAsXvrDU#| zzWC2D=Pf3q_$s{W0MQ*`T8XrrN9r{?G#EBF0D9%l$=jW#Uv`OoEhJzufJG*3B;&=V zL~=_wq$X6?3hhTCojeNEIjYZhu$SAZ!L!V4g%rX~fv6zI;WRXrr|4Rg5lVJyCHRqt zLXo~ZWC{{9NG^plapdH#x6$flZ?i%@&@&p0ujzh#9HQW1U=`V|b%mE4_cWH0FojIc zkSD-`ckco;iB=JJlJIZ_=zy_!h#g=c!-ZcAED|Jx92V|*dM5{wLeqBDmy@+}1@T$I zw;RN84(Vy;tb#dE;Oc^5xoxeN<&9PkA1Czw{mKAof{XvhF`?r?LA!7n;u*bINS6Gd zHCq7rr5o5ap+X*{VAgm24NZpzMCb=OO_VdCU;3Pm0ZjiSQZ#j5A*EBFyha*aHVZw)9fzb6%BMhVWD`b43Rg83E}^uK zhgFDo-hr^GA=W^eOR4C>^3}KqC#RLrh~(8UfsPLJwPRYq81(J1NXS;3T51?V7Mqpf3HAKZfGVe^;Ov#Ls_@TP>2= z^u>>U`culxx0>(?L*=`FP&EFJg@;au+L{_Q=S<@IuarHDS*j-HJnXgg5zVV?<?ulaNwP$j}huNLptN20)+MH(BL9LHIMa#RR|$&Px;mYIH_pkfx3>HRnF|LW-EhDU2!yw~O&>&~2Ys4skZ;DH@V^lDEw>c@}w*1*s zy@3W#+z1np!qvhRezqCxl@Yb3ay&S1!?v8R#58c7lM(i%)R9rT(lRpe?x=BD&ya_D z_N)2e9~%Z1L1nTUSc-4+U~Z)RsHK%AgxAOYM{tVonxgUf4fwuprEY+}+L};a%12ks zRrn3^lHklPN4iZs@Y`!XMz_&-f&$NsfKm61VRMyZtQA?D)-3Nu&}jPD*@EdPNH zZcK*6iVq*R8!7qod5~mqR*bTm0b*+?binBdm7m^9?x|JjG8ZW-T=A5mYOS}bQhgtdTO7?*e7E8&n^Tfm`-#j;f*_lq3oBULYhn7YkfV{EqThC;8w)A`>pzGDxb z4HP5KQcp15_$Lt%YD97*uZi5spBaO_r}h{86O&mOkj^=xKH{n3O@-bjRRV!G#^av` zEJ{w5d*N>>#k}iV0p@#ST9sRcylgSkvFHFPKmk*!+#gl_wEr70A*LTE@j9xZeP}#8 z_X!w0Wlz-8m};(ZmV&GwHvoEU1^x;qu>VPHw=9E@ZpJ|d1DeY_d1j^AUZol%c|2anStpGu(v>tO~m%-E&i15v4?3|F6qU* zY$G)V9I$nr|8TYF3-!!>Rz?B8wv4pG1e0CtTZJK{AM>Im@BQqK!|tDut|9 z%r6-M3F$mel!vu@v^3T0piBEa5-jG0BS#8Rai#}R7vAWdOGi<^_uerH3Klry<{lkV zyHEhjk%GJx>dt5;XO>*pu$yd7k&gO^5r~oA!Y*q={ge3xY|7ux~?v&Ksn53JA9t5EQq{Zy0M*{ew*F-#`q5 z)oikef(X~KO9f=Zi!q|RtypcR*gZ{1B#sdeCy>s*63!$_GOXbm{{f&7-rdflBEy>h z4Ml$s1vQsed98dT7qMo4^T015lJ*MXkjat2w@?+oMm?70KHe|5#3;5Pc~j zu%KHH+`{o`Ww^qwD*@kEIJ(tm6q?(cd`cCnl)vFKOxdEdBDsS60)H3%_X6`6e*k&k zq<{xfgi348>fOd}DgIIesgw6H1w9sT?OjG%cL!1W?=jD6*U%wbAPU-3H<8|gCiZK1 z*Ah@fum`uBGCTw1D-gO^5lA!dO(Vf!XCJ%mCGK23W5L0L;Qmb{U@3FxTa46R(QY<| z62s2fTsnCp1`VKT(YGw+QtwmWZZxeI)5Z%)Mbk4qg~4-_p$tpZJV%rw2uWVm%Vq-W z$xcAAs1m!7%klCz{M-(;E9FjD^J#V)3od)L{*2rNgDAXtz@KneYtgQy-*}QlQjI^u zZr50}_Zr@tFT3@XlkT1dj}Nh{f(CCL9efW*EDC;_*R*@ylN!0(wGnL{Q(mmUj%HjX@4v|WaZJU-UD z=hPu$-_ZkM5PU6A9y%ZgBIw)k9Wq)vk=A4_eN%0n61_vfgtYai^4S+?$ypRNwg73> zT7g|aX7o@37S5*J<~W0Pfd@OKl#;t;s5WRMJI?(+~Fi^{Xv&v$+Ecn2+zBb{`b zqKuW+I(8lPv^2g0YPJJzw`1a41D_!n8(?TxeWz!Wqp>tpvg zFJl>MNzy}dSaqlQa>r-5;!TNRnf4YFYQPxQUgjMxa5~8IH~Pqxvdv<@wSol=oz@*O z#AcCd1AzlE8sM9%-0;({WQ$g@r$0_n#=?{5yExMPV)LS&U-;0%?T>@f&T@gTWT0)_ z4!%BR3g2i%*%wHFq7J9CUmg})!LDP#^+rlARauEfkg`Z)VQM?Yg~TQ*nHa@ZUKGR8 zejLSx20MfkP%8mMXQ6IF2kIh(HF~Yfd;5r*RpA0+m+?$jud=Y9iF)H^`ZA>DxMo|0 z+|#H*9Y%O&=7(Ix`~uz+%;VGRf->nU>YRwfq;zI1rBTL>LW~U@6|M5S;N4MS6U8{g z&}-g?O?a{t1i{PWWJjk!HE%vKB4nSZe zwe1Vh5hz>@Qsfkiffe3HK7j=&h^iO1`BT^(>)1yIk|na1_J`~I4t~UKq`RAf?Tuf& zdBc9v^nni?18U!ECAz=A*?#YPk3$+hU;~sy7ZGbifn*7mzashi2x6C04X4is)S%UHuW zZsk`zS4wV75wwV`S~A{KrAW=s?oAuN6e#W=dxN0v$Cj&Ho#oqq;uy?4MPFJ-StCxoLTCWX5AUjxBZ5C>G8yo6s!)#}9b@lMbvZ76yNhX- zgc-LZkH;cEi&G%_S@+Ln!tns2EcJ7}BL)l*7dRPZzom@8>V3HQJr^4mTvnh9F7}F^ zmspw<5Zo3Zd;;a`NE^tH5epqLz)d#PhCBsz;4@T26vW_-G%!$wLYjWmCIgsW;@hCh z_fbH^W?B3Vtpd`pga+`n6K8=)nO!~L0cBzKO<&U!!}j zww^8LQ7sm~Nv=wk?|0Qv(~Ypb>uL_+>z^f0_nkpI5Pw`M2!%uU9)~X*D~~rtRx{W^?wsY~rA48F7yQ_P0g24e}wj ztOe}+p|_R}kbn|>-Nz}}oYNkoYM&E1x)@Sz?xw=z2=OV@KXTaR5S}aGu4$XTiMCTSvX4-gbL|iCACR# zbURFXF^v5oGI${SEBn-X5z(tWnv<{wNKN4IB#O(oSSTZlsA32l$@sB|(nd;bc%-#t zUFnvIfIwN7^iW4j`(A6bqPDQ3n={5$B#!Vb3k=VVwnTnk zKyD@GL()li!dG)pJV^99TVP!W{4;ck*qMQ1Wi4j`67aJrNYdHD6HdHz(#bFF5@hc} z`p5wY({G8YXaZ1-^Qk}h(@VxF)2#VLQI8+Qx@@QpoX;q5CESH2hNafjj`9QDjiBk& zkA?-otpdWDthAOSD7A=*Bk(RJ_8^o;NZQy>F{KK^)(RjBg_ClmD4RkVUPbT5{lVCD 
zc8J;FxSma{q}T%dbSxUD+WF5|`X_>}xn-LHW|0Zy0%L(asu)t;U>hj8Ik0%05*wmd zz)vEZ$WmG>S4rdk!1~LtGJYvG$d|^Un($bQIn@I;P(5lRw##D3d<*KL<^9l;#XSY%rou>QMuPeMuFN<$>06LzPVBF57&dg&L zp<{$jB8Z`3K8*i^8G?d=;gY!H^jYr!PQ3h(!M>t}d1Rx|a9tyHyWh=~pZ5#J_n>aH zP5vU9e-T+4-Jm+7P|1dgl|W-GZ_w!XKg0*tx#C#Z&AuIhS?A!o@I{E7wfc`tMk`ayPIM?&EB5 zVc$%E#MW}szBBfMoNH_YT*-{E=IZ=I8?h;(v|idG2NIxoiiZo-ddIiim05jvFAYHa z6AVr??}S>;N<*`^H0hsIPD6LfKS777papj zJAl896Wg+E!-%p&@kCxoTJ`xzu`FB(57japc{Q&_0wK)_mU0IyE*Cf>IuK}CJcIU8 z948+cl2n?QKE3pO$%%~M?cR{kfwW=WCPL4*dHL4HKQ`>rV-x(*LNub{d`4yD1N^JJ zhsUFVM+{Y%U{gVqO~45$idp_lM)L9qlB;tJ4R6pfF(b3wJ48@VT{;P5w4x%<6TsEY zFc4UBmPX-7USZ^{ii`2Sfi~S-see2*$3SZV5UK1UAma*Z-A1{@Hur;aBDA;CS-nDWuvr6z*m;7`STMV~ zCZB5>ODky)NJD$A`*|i}ZaSU8{!7RcnD{3WT;nPa^?_1qj?Z~5UFx-Fc_FCi`jo7Vjj#4c2+XNWw=G)H>)Gx6cy=N?qJd?~A4m@~ zc0xCZvX68UkIKs%WoVql9f`9p@;9UygdaiL{E$DKfhA+E_tS?D<)zcal0EB?;SIzH zuC}D_RR~JILdGpZTRD?7i#1@yv^#q;V>X%Qvv?xGMaKwa8-{kePni?P%d0U4?604w zozvO++hJs(7;As&m*H*mFWQxVb3zD;O#Gd{qWvwSz41$bqwL8ztQ=Iw^|0`nx!C%Z zzcGF|Xj@TI{2P*bg|v&4+}3K}|II(8f6D=JV!iM!IT6oYnfAp0((T-rgDu0%^V2OyOTh_GcnjL4(?6~kVk4bF5ehYIS4GuaZKr*H7AX8fau zctYhGlN$c#H?yalp%s$2IYIjeFfVJr#p69 zpdM80cxpucv!w4S)xUm0KD-<<%AuIpv7Nw9Q(SB@aBy&+$WjCzMec=sB1o&*xPh%C ziq2QU=bB!STb96m!6m_`XO;c7hm@Pk(Z+FmIH^^AITCzQA*rG72yLRd;KZZr2LG8J zcMB)i z^C%w|G{@1)@hpFdAw}5S{!1$GSWZ)qgO4lsvEX3RUCWGX<3O!e_<*i=)$gMZk%H*D zRHI`nOxNQTfyB3Sq+CbkLmcEalq#>y&ibeL+t-KF#`fImS93!9Mx@XC)V$W%FEuOq zMa8^tjAO+q#b5$|_juUwOCpIztoa$~TC?hVOmcpua>cI~rZlpEnV9;fymVvgQ@~BUjH)RjtloF!fAM= znfI|nN{N^_k`;dex1Q(a!l6W=rC|b9_JIuA9wN3Q^s*!`z_0RTawN_$@+>mN%-;J>qoVQm|qz5`7;Ll z+Cgp;M8pO9^zVRdEfM+8@&W7 z@>LxFP?K(JU%<)uHGHNXHZ3l1xv^hf;2(eu{fW{&d(rWy_#Un9m<|+n%II>%wIwix z9n+4%1}!Fz#bVn4redq*KLn_LO#7drO0r*9>0+|tr9-0f^rQo{*$>Fb!GHOAq9_O& z5(xwDYg%VJ<-vIdmqE)Rz-6VNk;nCZlni(dzLg92_kkvq)4~f`8?r{$gs$ZSb7^29p=bxV0C=qVfCzpf&&c7`d9wsPmH3iL9~qSf{59f~O5Gi(Xmdlv}rU zm17Rxf|l=O<>kBnbuJ#c3zG}B@n{2;{yJB>bOwS*a9iM5QOIGbc|NbklQ(Y(ZgwYE zvb4e__Pnn+Ou!`adHUz(ZiYlc;jNi;h6v*C*4Eu6i=}-Bvh>jj^H$5cnEXxJL460c zzx0^zXQ~Pdef=I5H52^YU%Fv`}Yb%j}k|X>I>oaq7=a`Nt8w;{SCF zCOnV7DTtC#21=*|oJbV66s6+T6_L%8`7s`+*Yck)Gi2_)oDeX>&hCrahfJeg|)6Z(6=tnF0Iu&hUnU7&r`q1|DBvbim(Vh{LV^Q619% zvoR^h$FSiH2^o3FTBUtBl_qa1a_2wQtWs0I+(-3wz9&L&b)Np_C8KF%Rv&&mEwNd@;lkiHI93VaM9lXyhdZ5==V&f6hLvZp0u&c{TB@mD1KcXNAgw!1UwPchm$r2RUZmrc_M@=@DxW?h?J zUl$kfdB*1|O?M~p^!*@1PAHH|^p3J2oG7+)XsKRdDD#DA(ittbF@yAcwOBMji;5YKE@>rS7)RGYW z@Z2F@kcuW)b~ab;oN5?i3DeNo5|FFo7*$pucXp=lZ@JZc&f1K!ZjQp=w;D=F-~bor zIbPe>TH%>!?J(8qxod>7S!7fC13{=cg#CGGut=dyIJ{2Tn^5+DW~{H7t#$gRE>0?r zb8np(dv%x&{Y3Iga(jewo6LVw77A{d1H?zJ|JJ`O1*CU$#_9jA?E!WZWuC|ylxzxZ zmv`)ZW|7=83i4_>1{5d4asn)s5~wSt2ox>}nt)b_L@001$#vm2N^dwY6Zt>L{pC%_}q|lBf&dS4t5xa(ni!jLQ*s(UyFk722+*6h8 z$qa4XAXM-A)_H$}`?qXci78cDB$IT@LIA5RGG*@z)VZErEf3AgX;^qZu>7yG{S=4U z3@de+9Cc9mxzu*LeRh6s|3bwYe5N!afbT(8>iuqKX2gk z3T4B3B2*Vsmb;l8CMSuz=|Y)ndQrl-RlikP&tP&`{VQXlMwpBdg=M>G8?yK3N=YCfUqLyoy!8QLv6!k*<%g6n0Vn_PtLIIb=s~DDo>(76>Yw~|7 zq;ZO)`5*j+dv&{8B1fXD%1eKCxxhdL=5wnvpWgr)nbp-`pqNK945FKEy)>I`P+saY z!|Avd)dB-szQ&LPv=Q1GYF<)BPksD4i!(;Ah|yb|>-}0w*^#!-v~-U=MDAe~m`p`Q zYY*L-LB}wm2vIM@pL`26Kl;bsJ+2+J72UYxNN8p4c?O=~UR@+;O}FZ@i@?P+PDVK~ z4^s?W3M-;y_nki}#_%8<6FJThD`iBRryS*f&B>U8aRL+~6pWco5DDoSOFkV-=39 z3h(LLUFT@a5p2bT4N3ypHpw88HwGOF9QL&3nkIxo&p?AWGb$?ufkF)LUqZqIJG(jrINR1c?Lv8r=hZsLGS^atf4bS=Q z0v!+OerxDohngbyG5W|Y&UJ})?}q7h7MzZ*r2d4CUW3VaQ-`OiWGiIbr!z+yhK^l} z#A)c#$xTc=KnX$T5lG`2pY!6#pr1rUOt~gB#vMnEEPRzt6XVRM1Q{OCJfuhM#2Y`{ zpiU5J#?C{9A1(yCj^uSt5CR?`7Mpwcf}THf=rEJx)w8%_xI=+1 zcpa=dd8sRM)M_yGIL6b;2+C)^59y>*vR|yv39i&0UCG+JhciqKP*PdF8Ci9n*}y$3 
z*)!YOgP1tS#~9ZBbe!(4s&nUBh)zg`*i_ET-D;|@50$`SGd0#g8P#puuA}A=ap#m3 zy1m9%*}U5~<~xn81-n%PD!%mM5er%~LAp524QlT{xSSj_5t&2LYEb$DE*jw89%NCN zub@^!7y$-f@FUcl?vb*1M{^rhfN)h zBVmQh!+?uxRQ#Bnz1)ducAd%vV*~Bn4b|d^t$MKYD;jS2sd~72Rk$H8yJmDjO{H~vPz#QP+{BzkGf*u?oc`77 z&Y!9HfU7m975e68O5wha{az@!7LQ6}sm@%O(U8#yg-75>nPSV$etAvj&hFNs~01c0$MjP+tNhgV_uw z$C*wOEdQga29ioCFh>AUP*gi3;$pptM97p0CYOpBVoW0YyZJOmL=?2%GtFT=0Jo~j~<;OKpZ3`3Xeiw$P|m? z6o+z4)9THMO4@Xmte=GP5`K+U=tz$RQmb5Q@=K_WC>?myx+D{>?0Kl+jR-_D@}-NU zhw(MHuy$wxp$uUyqezbw6N(8C;%^Bms9n_CV2rE!c2iD)DKWj^3u$;bPp@U-yYlO@ zl4#w(G_yAl^vvn|zm>9l^|yw@r! zHu@urX9HX4ryhnuAFBCDyx)mgZ#Pi7C%-QaX?4*H8;iM<+O1otSt)5|l9R65_jcL@ zSIQwlzv9On-jxlkVky>DZlEnI^?kbcFD3J1O7z^)1vjX;MQ_4QNi^|a3-C-5+=^`K zD^y6k5<8{7*9gH{D={Iq9rx<{-;7%Q+^p z+9D75fRPakPMvFQaUq8lBS_=|-zZzkE)iI;K&o=1WuXX*MO*~LR`uS5f_R{auv$h| z;5g-Y{eroQO&p&jgbs@tIHi6%quwMV|6gIJn0`x2>q^XxijXu&{fDL4KZG%Q0xO;S z!R-c9v_OC-&CPJSJ~vT{Q@?5=kFxZ8AOz2U^~~-#>%xt8oN~OR38mufFXF86wn}}A z1*gn4H{GD1;|oa$?nMqoT;QGCa>9YHA0<6`Yjac>r@?tV7Sw$bk}q(yE@;gUh}~4{_8IL+iw@qa>uOFdbRsS z{?KxzDc$6uYzrPa6;b`)-;H%`ot0F!^o5oF#fY;f-ir33UV1D?<9sFUtBq5u6KbKQF2D9H;MF+oMlU+u89JvG`Ue)EPcqr&Wg~6*T(oL^)*~WjZj=9=1rW*NPnf2R@?)wFH69Z(pLM3nq6wis53f+eB)oD>g`R|Wa z1xVoQWrT79a_l4mn#XSkumg&BLrH7`$%nIGD@|4IM<}OH-)(4Mn@Jet7O&ZtoEfg5 zcYVN6zi>e$6GukR&gIzJ5!@<_OI(qxYY*r&L}*t8=-QJLSHuaeIOVvfb&iT_qPukM z1gP#C2oi~KWZ~JlJfuHyIYYwr%c_5052CmVj+S5`k%_zu#aw#SfUmhhw|prmz7RCC zSgK{f$;T^G71o4$*O^Y1DGT{$`KdU0u&^4X;9@aMD0>FEeGHL{5^&_}xia@48LvF{ zPH=+3X(`CXDaWfCP%7>hB8K3kAXO-QqqNAXB01TnNOihv`-7+Wq3mi9vvgX9;z({S z|B#?MYH2btzOUmyPfFJ;%upnR8@}oID5^t)lU-jF>mN0L3oDK1H~|@AeHmY(@E2zX zQOrmr38o(;P~Le*yO+m+u)&uH4~MqqrD+zXqmWdJ0L~Q{xpYZB!)Kxa1Bdl_26u@5 z*SF|qs|bEt^$vXpU!(YHJs4UCs)?;>-1>gfVZEHgfFQu&a1&f4z$-Ha?31?m4Z6t%`diujC}ej*2&{< zK{CIUiwB;p+4ZvZWhJC}iO<-c4EV<=S!g|{iqwawx+{TONiRQKieGwa4V-!uMn1_u zc3t^ml~AELE7NUJa8oRG5}8kav44I=t{|t#IXWcYsTq|0ObiL$%7Wsx9x`DPiV2Vr zNa~3|fpuwF4k1*YuME##oGBDP7y4vPI)Mdy5r=CI0XQTK3{Xi!Saei4mcerh zgY#bbAy{%}Nyxa+KRPD#>xzsgPNv_s1M8koeiNA^rokzn3Eou}u3V@M6`R zx7mKZ0mx6VC`agXd7o?FWlFvx4kw_D$n|U=n3=?QL%1EU^5+~w9wtSJE5D!x5#g6| z1^TS5tZ`Z57g0oxbXz2Q7BwQlbBpSaQ}Ae+x^zpos#K5n61l!V!#?98Ps@)_cTgY) zWF!Y%Bh_BK6v4oQa7G@3|4zX7DMgfwX@uK=VFa82g$e;dhv$5MFtRJM3knOvUu_^O zqX%OKAsgj_ufK%Ci)m}?Xz%Fg?1ofiL7nRxZAV9#ZhL2^?BqoOpCIIQd{gM;2?-2e zJSh`tW!Jd2))gEAGAq|+K@j}=9*IF}$#0Cz4bMK5-&1MzOe`vQ}Om%F@Xky2B*Cf}EL_ zq5c^Jk}Au`vYN@g^pA*%2V3t*WHZVbRh^6)cUw&0^iNWk^JxV?gq#fx+YlJ`tWRSn zc-F|{#~SME)xAYWm&Y*?A4nw9MVuK{yU?GR_ z*>^QAl6dOMdeO4gA*Jd}_kqti!iY?w`sjrnqBmy%J_X$tFv3Kp|$rI zG>yF5*&0R8$_16_R7(asb3X|WKsQ3I`#v|Wt~%;=EzV2OwY0qTCPhi=+OTLre0j>U zmls+SySq(^jq@zD)NDo*M6;?E=7}6TO~u%=^jfssMo9W~8ExZ&mifB#J#zx6);V^j8k^uWM)VD`V4cWVr3TkN;pmme8# ziZeqXJ}^Dd9xyeENDT4z}! zpc80?=nvK*V@%j8965hl>*J%lq-@)ywx8a)OWvh|J2orrqet^{Hf;<^@4nl3rWJhI z3MdOXVHpRC+H`yRnETg=+P7#19mT>d1(lwcdz2~e*!EHJFXjB4$$s-Xzp>@gDWzg+ z14mlx%v}R_Kfo0i75M$Q()`i3isNLw1pd5Sm3a$@1+Eq8fuEPcB{&^ju`^PL|62O{ z=~uaqtLay+h2u!fHOe)pA42wvA*9+O#eO{cYBZKc@T@g{{5CE(%JE0cGxOvdG@L-A z1Rqo$8$G^fT6r*-LL=Y;KMhr)gz>~Y@H775Qu5S5{$ojKM0=(Z0#Gg$(YAWV1|VmJ zK7G?+2<3`qWX;f)ZXoLqaBk?(Kd z7n{`3tfEXqMpv7-S`9ZJ)bv*PACj92TnU^55&Nk^Cr>|YYA0rrw@$3WS+4487QSj? 
zE18&SY9H3~oI>jUhyzHK?v*2$RZdOUR?^awGukuKlULHNH5I$)<|K*k{|PgC-sDC{ zK|Al!kfnI;73fchc5f#{8~8d|qu=^bZ;+t6(dor3bVk0U>V1lZgf+}_kzyir=~Bz@ zvke@=#LuAxkOM;~miMccXeUtC;_1;k2qBL4B#(Dbk)W8ERX9=r1Nnzs0!{$ZO~pLd zMGy#)2kpkvH%&F!tqtbIZy0#){7#>i(j;09ktK+r8DcWqLJfmtC=gt9@rpM|0Rbm) zS`emxKEoW8B>U~QC`iOc8i?>(q^&)>o;ZZ-7Wh40OdHYWR z{Gboz#*l88tLNm34<3*yQ(JrGNJLj}{}~V3sgf%BqBf2Zxw+=2LqER3U|tBdtqP%o z9Rl@NG)11fZ%D6Reaj?VKYlB}itRK0ISF{-wZc@n6!s5)Cg93bg==9iAbmdW>yO5w z{=VZMBM=QUAX0^w(#ASJVWYCyRNDmWJf+RcfSCT|EI}Wnj-)>D)%jAcf72dh zSem1S5xPz$g<3@B$aHiB*5)j|AoSC=0AvyL-CSP0OFro<{4R<>e&AxFEOz3Yh6BOA z9~I)&iqTEx8FFKgy4km=J^YMilqM!!Lsd9_j_z zzNQA82(`XLW3)oYS)^Vx+NFo>1Qr^Ba15tSm*uMTEp$$m+oj=?d_BW4V_0zo%{yGP} zLn3}bu#+>x-}T>%^_l=HbU#+opEn>5=a`_lD`(dJb%EI>n!#$UpCWs(qlCd zzR2fdxe7+O5y=`jmZ%XylM`=U1bljyg%ErASY>80xPB#x`*}DzxqdyPAslt*)I;RO>Qex!pYl zf}1Sn%>qGp508q4PPcJQ(wA*|HOa))xWMcIqn zoG2mM!e=j~v%FP`6#I5iR(=u{bb+$+?Wy)kg%{}mMoV_?1Yv|&1K+KM=rf!Exyyj& zbS`%D_+$tnqFkfQz;W|B7o$0b8h)?V53ks@0~7#eMzfVF6{!}>OZn{r`9fs{D{N1( zS0OKJNC%zZL>IS-vQ->fV-hc`w&tNT}VQ8+#HRL*@umk-R^96%kE&F<|TMENOf=->Uu=Tlx3^myaXULTA z@1ui1h(nv|!6}ZQ;-Y74*_4*Tgc!t>Z|EO#)cfC4$Om&0YEp`=-#;|W=iDCaSzYI2 zUciAN(&#=+&;^X=|N1&V9T(+X&Q6R$wn@kSf7f7vN?kmF`bj`F2wGk+#)>}71JcP)dk$*3Z24`o%=C4ET6?MW-$xsq(W1BMM zLtGt^MB=^6`R+L=0J#Fgx6ieEF%pTW;||GlU{q=AVv#!B_CsvHZGQO>sOJlSey*)J zz$+()hW@mqgDbGbLCEOi4cqJ>O()=^#Z92;eod?WZ2m7V{RfgBf7|hJH_unr0L5T%GW$%u49DM}I{DkcwwUN`}u!C(I z9`6x~JX&r?mZD2fj5G;NL4@M=T17(x7vI>$Bnb)~qx3zC3hCzzC$y;vd@{F&m3{JH#LGLaC8??aRcN!gOfl+b2`&;pUGn=(SRQ|S##D~w!s-HtBdBcsxshhmK#Vw zKghJf)Hya;O19e}JijQ4$X)qlQk(_NGPy$gUh<15<13%PQo_{O#AsBm)l@sS2xG95}J5P6tOHpqDe zFPbiGS4^Kgm}8nWs!y5qF*##rK*7IS0@1@Q0_8{FwrX{`0xqwBZm802x(rrvz^co) zv~S7j1w5`GSEoI1t31_+HddZGZ@Z6lPj;`w$NOzd`LR;>ag!t}=Co{fn$bEpe#)ApCZ zf)8U(H-Zz?^&#QbRDJ5mSrX;!_d>ZuD*RVKP2!q8`56d1xV4Vev21~kV+wr9S?nt5 zqd9pCCyh4weo;e#Av?)bVJXr7(EX&h#^hi4J2YU*1AYHvE}jcGi%CK(k2?Xj&fk_G zqGRpp6H)341L-;j`0<O3TvI`)u)^y0@HM&f zeU?+IfVAD)2zk&`wr?y1azGX62*y;OBL5% zWb|?jrG+M%hFrb~(bI%RXHsfnfn-1+9BW|u%zy`{ydekb7yVqHU*i?3CHDX9v7BIZ;C(bC z8d$PNcIqAf%6{kQFoQ+KAX*@$Ea}O(=f~ zl(SiYi9lW!lRLsbUpFF&QYYmKX`9W+f3c08^U<|I&VRW*Kpzc}AQtc$p+V8L>$sMc zQJZPP+$43K`QGE#GXmN;L0hg!G+;0Vg2d(BVJ2T2+WV?o=z<^|G?Up`SGEKV@y=f$ zm1pUjee_Fg5uJ6U3+H)YZAqF1%+ESp_}$9|g6#5Igc+3I@nnl)9=FykazrLqi1&jN z3;jvZ04v(x*4|Dj!QP7c{3QRDHD{hC4(aNP;LGZzl12GxF^wDNd+c!dL|b^m8Ib3t zUd2kQR#+%6sFCT|H*?pYha24G zgewHKM8C-Qmymh{5lVxv#l;(B^%X3%`8Ee;cvfX!09QEQwAF zURJ88Q7yP4b_~L^RjfWbdKqZH;&piKmS_*K&I-o=%P8Sty{-*(zMfIBb|cwJk}DyELv5ux*bYIhfl%b)1c2WBPpP? z-nmAeAjUA5QsfIsXh&1Eth&KHzC&|J>q#)6ldz^x@yYg3&ELTY^ zjDCExrG6i!flqyB6A9t@t44LvN&dDH6e|YHMJzUxF%s?A36|J+bt67UV1s9WUL}`@ z4iUtpx~5#4b9J-1=WvM*SLJAAL?)NPBcEhW^0$h&i?^BU$VH&d?8JSC47o*6-ofNB z89n9;gdhe|swXKJ17afM#(c*?GN6Mlw#Mp$d=7$t9ZWfcR>H5(H)kX*l>}Uy`y@?y zxP(SW8NPao?P7I@MCfjSDtn5f=&4)-UGX`V@#=#{J*be1ASS?#4_>{2#6evPX~H;? 
z$_sFtn35oTUGK|4=}l_97<2o5c!5w0RQx@1)>IqgE04zezVb9a$G{2DYQiksrYgSS zVz{(~>l*1UWb~f^#|?C9KKYMwI78KPyVQJV@x(FkWfNoPDxU?8kdXQo^W3h?c238c zL#B?M0Ifz|L+wRKc#fLXaI0wOJJ0AR1!4Il1oI7O)o2rZ(UBG6y+d#uO-oJPfKz!>>5+d*q z+!Gy}B5{?X`~p4D2lkh71h$JJBgmJ?S~0P>B>&$cUj>F(w7D-(p9%`X@)1&{Tt%r1 z4Wt7F{3ithzD<*#FJBx2gQCkQHU;)^S|yBYkbJ)`KsgPe^twTi~saQN^T`-Oj9gUN_O$fZSJDikBD)t(LWGBd=Pa|5rB{ zsGbdwTNTE#a)S3AO!v0+YuAXovmzQ6WhYK`A`~53sZ%$W7vN~v`qL**o@VKjKKiH$ z#oCE{MY69SSJ?L5w6--x-trwga%6mR_VDEB;aA3|W?#0z(f>qgA5^F4BZ3#K1m)P& z>Ye`VHjO<8_s}#lPpJLvw@sTODX>hmh!!@DKU*BM=IQvZGpRlU9xQY!8tuNlpq@|v zqD|YD>5pK8To}xrtm3V7bvN}|A)nG~9Cm1d*4dHCdq(mfLaOT<`@mubreTF~(RC$|ufBmU#JLswYptjmGG-NcaU^53Cf6ISSm<8m(FTs-tg6agR zSWrwFUhfIF9+gvxVJ6K7^{@2T=6~@YPj(s!@}7AtU_$&Bb{dw}yiVx&H~;zw5~7=IART!*Y94n{B@_N5{f5^_oM*@Oa)crYYq_Q~<^^7m{Q0t~T)ygU_61AzEjJF{|6YA&?2`h9=85_@04-EL zX&}vqhco-$Rd5BAH#6C6#@n&B*Y_>GoBYRNzk%kv-VHVamCa_dzv|fXwO_5#RNKmY zwKO*ED_|@MM3^$4FUIz0HFg=e#%3rOq`=~Br%x+gdd6k-@}aGu7!>j;D(G_ZN7k5L zl-U!#b1i{S#EO4%dCMnVE)cVJAL*FzIH)-Wz+w>DRO%2`qb3i*0#bX&-k|9kS%x08DX~6DVmE9UC^3d&sCz8x*V+qGV4w zY+&o;KmFu}#r;K0N%xTmE<#C5uw2MZMRq-wSSrr3_=o%q=7P0#&XFivuG`vsxgYdS z=*_;`3bxMFu<5t=>QQ;&oncT|$VnTrEj0F!X0cXRNWN1hs+_AGi?Cdw<5* z>(>uARwbaAD#wAjR*e16*SKDj-VQaaTj}LqR^|(7!hGdr?)h!Kw@)lmwgv3O6mS55 z7N470yEWRqe_hX6D|F<=f*lh}&F(!bfuS=ep_1)OGcT;jaV;#TS%`v4X9Bbak}Fo# z6XYawwb!MunKE)}6pILCYJKu4cD-_1>Ha*g-fBs!Tks1nehMtR_)Sev>PK83`B>0$s7aiH2h( zSYJOXh`z9J9=qa5+REFXYf#t3Nso!6nZ>X#$(u{lF7$T zu22nAtKbNo88zbDT`DxPX}T~n1%0HM54$~cK>7FdR66zTkKnhj(3l(sZz!npQN>eE z#gjViq8-o>nEyMMr=JWc@K4)HU`8^q*0&0;GsJlYzXsnLKpAo-^;Ne6#@2^B^h%e#-YioWW+L!A}MLi0?j*&x+=IgBP!_M@o6G zc{w~sao4UgEpT#(emP#(RfCP1>A6j&Q=@0?N%SWq06|BkES2krWLp!{N4vuK=6WMn>v_b&-+sy?lX}%d3U5Y9U@GwL#E&g4vuPk9OVqtTB{KM)%5Jsa}-e z-!mbMy(dobn*@s7-#_7A^B#dAX}v^N-|R=|f~eTw&m1n55>A-rF6`^TOCK~=iufG@ zE_+dBS`rz;k{hsi?m7czP zt=SU^o;qDtnxAc!61be6R+Qr~Bxpkf#8i*^@*-#ZKQQM%TMRepDZ(8|L4!j{SwP8D zm{7sjJS2dXIjHDb8VMV+ln<}^wf6l<9)$z&%=d%MvMrG^wjE4UIrX(BwsoZH@R84s z{)}L%VWn2T73uBwuNRS>jk#L|<6$eWK>TJ)qrD;>I9xOi1p$jy(!`#GHO34UMJ`m| z)z@vx8_2cJJDy3kwJLv~`)$cMU!@czxuv9zq#H<|Ktwz4vz-mV%&WdXF~Z=i!PbcDZubfbt%sO2qsPNjF{ z4YHhuQl-(`>Mh|CIbxwt_hA+;P^zYI1t$`qSu3lOdhpDsvo=|-QtMfkr3}?`wSq(^ zQ0yk!)e!$`=~jplwxSHZM$9gh8kX2=?aC~0NGfwll(X_M_vK`Qr3>| zzl~e><7EUfmgfMxPxg)Vr+M9H)yxJdRR~ff2}uQsASmcQ7x`Bid5cQK*wb-gQcd?= znBKE*5v%o zD?f~DrPw-J0*iM`D}!|C64D+*;Hljd3hUQ zaKv&RS;l~A`i9t8>9N=ppRt6f%w0<6qm;+o0tDtYDuoRS&6v31+_AI+qFnQD*Ed5CNmeT(#nFi z45_AjQEIFWIi&ErtKM@@(+Ao!jnoqcfC%faNdg8apQZW<1aLsTnqC4rARjMvAck)p ziX*($fyMZ@L$xHIwVJ4dWlfa+u5Cj;={v~f$pv&OO#}(zaqoN`&1w^bFG$M|%9zPQ zHF6r{Itnt08$CtF!9MK;&1j2OG~y{eZ?Hiad`x2BmPx<0fo{LK@v&HtBpulGPFZoU?j^1VKK6%-_TYzo2OP}bbW?4 zo=V7r{s>gTHW!g934XFR2&(xO8K%mbEf`dewj^3)941dwtEX>ZXk=_+YG!U>X=QC= zYiAD!8@9oWA>%$X>L+7X+vALBcO7*s#64e{iei|hyHPdimhHIQ9I1b@lW+Aji1H0q|XUe@XGTjY07ZJAE<-UzJ8F=X&XQS5|G+`#4;%dX-6(Gtz2ymD)RcE@wNSU=z)eoQ0Q@|99u=Wv#pOV}R)pnCF+jKJWW8`ay%5>c!WUUitQC!{QFWcE1PbhpE;- PaXo+be{0we>31`C1)00bZfi3|sW4Ge)Y8+vFJaF2oire z6Q%w*9*@UcE$Y4k+e^FZm0k67gIxW+`kdS|b}&XiMSq7>q)bYx2$o>!2#tM`J3!Of z-6gqP{3N;LV!d3FCbcw|CKZjqK>q{y!)|_X0IcwQ+DtC0gcbP84|}u$I@pj*3Huz9g3@`{>+yd*6g1KS(89qAp8!=MX|4OE;Y>cP@cH1c;ddwB&%?1p!gJ1o!rlpf(V^pj0r~kCH=* zWsD*>N^(e{cTvaIu3C46yZT&|jYrl}ORRuc*a}(a0EmPob^v?@M%l{tRjY`Hq-QO; zWx}d0etO%zeU6aoHM+(NS|#i;|GU3e^N}^VyS6T#QHYFX5HiXB>zK<>wcB!b&aoR~ z1Lg>j01-&GF979#J&Om>bGj7(Hhz5YH#QLTb58)iUH9O>KTh$L%of0nUg$XVOsuMY z_ZbIlIl}<}{;GojfOcD%=iu@vX|%{qgJ(_ur-nx>OOd8py=BJjbt@gP?tZu*>%IL%@9#s4EKSk6fByx5W|k&HtwtOSyzH0jwYpX}diyi( z>w}97t)jL6FM9rS&s}%icFRZ3JK;(D?6$_FQ42ZXkM+2{W^MnL7oIUHv?m^Sy?M<* 
z+Eq=7R30)`Dx0=%523N!~#qE^`M%ty+hGH2Y%l%#!bup`_#s zFZO+@wiB3N7lLar`?*10Ejn&-l03!clCA9Q{H5j9OOke|?=q5UO;d0b_F@+aw+OOB z1UUvUW+1W-xX?%=d`#eK`DfP1^XEsxV*0Xj{4r5s&7@nxl$HrA(~qZC!o z4GnD-jJ7r`hJo;Lfy||St|{0&RYcq*Y(txb$sonpdjRaXoPm=7cIVvQ9iz40bnj_C z3DXR4>O`e`{sm2rP>|&T#NPxF)klYd3zeM<=KwCQjvCw7pPbUhe?KM4aJP!gJ0VR>p2ncjMq&9jfH1sRUAdUU02X^4IL=^R z+cK{L%09!BIrOy$7-JV&5VD;8x+8>hM1}$1oxn^I^O3NCCo+@^Qa)i&t|})oJ+$RYib>jAC8GoMs%gCc z8jAcL#OrvCE-H{Yy%XMlS(c1-namSrQIPI`bJB4OR6VJPeM;DU304?xfR~&39Wx?IV=^t{xy&` zFGGCucm@|Q>A0}EjMUPpCGR~0ko~ryTC!7ZUSi`~bVMk~^&EN92nrfQhbEv?lhCCp z=+-p!Xa@9ZCiH36S{us$M09!oHK`*I{4kdTe5n*E^%X(Y9?$Teb*vlyFa;uOi*-@(-nbBvYd( z=4N%|hnrla8{I&gYF1%ikad(dj0^D-Uy5yrcG}$e&gbn%eB_b<~mq<@I1N&^pI9P`Ah(#l0W#<_tW*URku`0uo?KPRM zFrS)<|Esnhwn%USW}`)uYhW(gcwukV4G5A2^pG*q3FQERiM4ltlg@NY^x40J>r z7EKLc>43Ht;XrUxb4h`x1NvGz1MCwaF&Jh5(RF}vCL)1pq@^0POoNtd5QR%z*Gd{g zr32PlL<7MsttADW4%lmv11((BMz)6OI>0#-xhPV&W&qoDfO{tA4-{e%lxLxYTCx{v z;to0+q3%2{9w6|}AoI-t{u6}as3=*En&r|I+o4-Kh#4Tw!1FmLuw(_+tiYBP*ewNP z2ADJOCFdmWti+R*c(W3prQpv1!=GoU@q&Nn#rB6sZ*;OH)`MDOWAr`D2C+L?+^r|L ziU84^0(xOe4jj11c>uEl!15LP{&E24GN>S-HJ7+IslC|r1lS(AqI#IhHx_2Yw}sCI zqc9%D@)%|)r1%Uxly*N131}dJKiiNG(@Hg(g+eDmVrvL0Oj{C8VKM?&ITp1qC~=WK zlN@&ts0`JLMETNEnGbQvqy<*0`Ow%fn&MrNJXEHj(r_0es#n$p1DQiJ&FNub8mU7O zsb)P2lcd}s4@%R;>D?*ItCjL>JWi3GkyDvo-&j>0E*9fT%PNsmiVi19B`hjS@1|I} z%%h<(g^EFOWjI0jRftj@n`MoTsmTu2qQp?URH~u0T8&1;6LHH#9G5nh#q$KvQ=lA^ zLQ{BwrsQD|1f0Jya~?j=U!c{lJWF+W!WYk)+}a5KbRwWrDX%O3rlC4wkr&wo$H(Cv zu%QK$4b6}5G51vrtEMqHKe2@z_jjX;Civ>O ztWZ!+*>)@$a#VbXF_h#Vwo?;eIx(vtS?ETzN_2QwBU$66Ezf=gw(D`J8-E? zNGtt;k<(-^%n*ZqF~*GIyJ}MO6Px=D&i*v@iBH|a+9oB!Rx_FYi-O~Jge6VCnral+ zV!2uo?J0o^4tgO74XH#+J}}@sm!N__U7aofX-J4A>m1bu#T1s8=oIwrF!!6{aq#_+ z7Jzk?dDr3`1WbqQ-}=f2o@Uag84%VaN94Ui3q~_FAk5;sBm4=Y?uE+GM@tRH_N0}T zNU1Dv%v(bOe>xcio<>Gzl%tT=8Ce4!8{WJ%kVgK0$ODoE1Is=}_-D6i zah{`b=aq8}g#&e(c~`qz(q@r(`V>S9V0XOLWKy&7pI`zRnfn=lg=Q)A5ORRME~hy2 z=QQ-7M*;i}5*2?>_V4<^lh`uk=w>o2Xp*(!m;lw-{THnD2@cICR~ znv6-rruNsuWS@a&CC5-0pA=_~hlxa6f81KLZ(lJtqGt%TtPF}b-lldnlXXjvYcz!` zl04%=jL2h6);13A%T=AiT-{qzXaPm!Zp8;D+-iH@rEC!#=P3w{JkN2FfbKx7rl{AU zZs`P*F-oH1^fb0JX5Qn|KZ9+b$|s78>#DIi`=G9_aq|9mW=#UY#hCX9jgFFaYCu+K z^$N$+#JLy|)-=bi%*mCnZxdTcTpS8*;lTQnqsnacNSktCyJe(CUR-rs(YB_Rvi~FL zpkY|hiMABD$??|LeviUdH=Tq2l-2DW#zvDA3Vdn!8e1fgMWp4B568c(MwWFPKc}u+=n(U}x zjmh4d6jaA_T?;MpHnRbt-Q*3~$1um_O*@g65Lsi@sA?#7b>$ug9Le|SPmFTG z)Hya`5+mIti-0A`8N3o(PV}Ol-;MP5V6Yj(nLDi@Fz>$ zOu?l@Ny;6?_gCTR6Xo16L@1Kw8)HX6(};)w|Cj`OSvv~dnf4C+J&)eu9mU09BAA$< z5E?0XgA3%5&%NEKF8hPniza^=5;k_jHc%nJ4cXlJ`Sm{SrqrqR0x> zDPH_<;#wTl3BzZQ9|o&#TPVQ8(DCBI0k*a+o%PD(zO8^nuvrRn(C$h>i()*VEgqSJ z0IhVuvnMXUAm@H@RP=q~Ns7su)&%vo_0CXu^8X%Crb=?9qWhGL#It;hq}Jhd>>B zcN}IO4<_kF$u4lu;7B6WC|L>qAYNI-V&(@p(XZH*Go{xTT?iJKtTfKabVx8Zn71Zp zIl8v|<_)%m5(mRtg*?^kB`TnN39Mvp zsita4HfNtyv`(Q@lgF!}buzZ_5Zr@>?Ow?>ZmA02NAu{_idf1q;u`CU6#s@UKqHGp z0eFxPE06AY`>aXG7L);kY*Z{f9}vx~y!@Kc#2o{@75>QEjPfZ4`Rn^M=AINllimBK%sda=5@)wu2v<1^xm>-+9gyO8{5s=46jh9%IRFdT$tR7fWdYFJ2&{uXKJN&%Ts2 zBTnadCM0jMk7;|`y-`J?ep+fM#JB?kgFLlZwiItMl5xQBR*{SrEv%yJ<5EX)P-M(E z(He+^C8syzu4kr-ap<=W9g5aD*;o-)%`&lLR2*MDMlz5UK3_&n1LI(a zW`N0dnt^~OZ97TS*z*sZwo~Ff?-~@X>6!!<@0G9KyM0_TO}Wc`}K*$SwD|I z>K%3zar5h@*SzJvLAnSvxmO9fe)QlP4WOGa4=Rf7Z;f4%KHj)`sVTZY0e0CDY7+^v5vH}{W@Hh+tyrOdqo-eQk zNu!Wb7RD{Zlq7(97>Vwt6weC#~rq8%5lckCVnxIl5@HZ z55J@Ah?n*4$5-2sxY+DzFr}cGY)`kY0k#NNvWv*)ImV5vb(d||5~CLrCn(g-uu^14 zp#_l|=1~@H9VP5Fx*aN~(@;qWiZavY*ODCD-}FwYjrp)a~Q+ zCYif$u&X`xsBeKng7&WRZL^@knU+D6=t<&q`tygUVhFZ=cZl$sqb=<_(+XOx5l}9z zX(}Z+uIP;F{*l$1dBb<@woC?OCuzn+G+cvJ9KSfOs%CF-g0if^d^`uy1JB~78|F#m 
zo}~1wING~VVrpp-M9i_uurKMzydJNG#$U2C|EXq)$%sq%6DD(>$#Zr)`9HZXo<~rz znHI5bLhLDaH%^wTCTR#~K0%rwt-%sS)qqqJ4~cSJtpb`gPmP@ra z%w;UK)}{M{BDGUGuuiPIuc{XKZpC%?URMv&h0M`(Sw02|4PBCim1&nvsrj9p^jqQc zs>9B(AiP(ldJTTK66Ze8_k0v~wrJ)l332029Bc&J-P*@wZz)bW_Ay=}A{EY6gN+}WNuKXHOD;Oj(t{=S_}v9`z^^@)AbnKyFkk>qKb3I^FQ z9wrFkwF6|Qvw_gYpO9qb9HvHSj6P9MO6BIw8qwp$V~lsssX2R~anVU88%KhHA2et`mAepNfgsKF?X(&l%e8)( zBYox|@wZ<0_edMwJIhWxl_l)1UU{m{nf+BD9hVvB0XsI;ZhV&pGRJK5MR-``6D7_2 zz`OXS$A|%MbS!i16JMu|{n&WAbB4)o%DTqt0*$L5OW94XTAUq_gYJG;Q&3QNp9~k6 z+*iRC_j5eZG4G2}($*!yZp({oZRIhzPKk1>bhwvo`Uc*|s=w)&z#HJ}WDe)d`0ZQs zmV5We^*Aze&C8>0p?jd}U(k*e6A(_Bt~{yP9J^lkZmBCnKQOmHj)+tihCyiU2Y&ox z7n;TqXP+Uz#X8mT!4j5Q1$We~W<6z@s->vM?r!vlHp|LjmHT)cLTNi%=h)WJg(=Y< zKd)EM@PN?2zfMfW5Pf++zZY=?B+>#|s%Ls^tV$JFcg@gV+qEZeQD{KAOQ(oc#VZiek)tA?*)>IOoC#YP%)&Cd0fA{$v5 znd>A{NLj^y6Sdg zg^}2uf10~~g07v_U>Z_;1w*WOC!Aral)ot>HZiL!C#%Xi=6iB`KwwLaF-`ozaVnqv zKE7O7>D9<@=pFBgRoIt1om|E4Ir;Vn734o>W$>hrZCUAKC@_M4J@+}y&U{zh%m-`E zs1GN1+04)8ht``hs?^!Ku=+D7Wg>URUQ;662)k7d~!Jz33L8x6b}B4X3w$ zbF|aSXdJWYrW$6+gmuZ?spe(c0900MCO2By?n^W_Epu#IRP{R+TlYf(5f-WBg7{e^-%R7w*940Ie^WM~n0vf>sgfGr!Dgu8_idI2`)Dg|z(Ie;iBU)wk?}ZO zX3{nb>?!4RDnM4>c8lsU=j_-|N?Ip*s#Gd)CjPQ5-I6q^?Fc;6GWGWz)nZhsDc1|1 zJ{9ub;t=bVPK?kf1j@S9GEAvNd2qXx-Xk?4-X7&zPqxNr3<6wySSzKh>6TctJK5>T zBf=Y8iDr@4Ex&Ebt_GYl4s_l7^M#5zT}i(8jgbH0OzV#hE{AtweO z+lp8j$e8aWt6xYCNJBXG2X_h}D-iBtk_m5Fg%oPajdP|EDvAoir&J|vxo58tyoZRK z%;#(erNj%g5Ie%B-sGZ8A=A}h`vo#j_5_@CvtT>&*jZ1$4o;T8P_#Dxp6j)M9k@g9 z{v|BHeh#SQU*7Ov8n5mhik*sP)^W@MEPUC}sDUYR(-cljk{Ya(&x@PlWVWmZ?KBOd zD@X(l7mvF^lQh~YJw<5I{yqp;T@;0Xpc$@lpVo;3q;x6e|seMI2@rnu!K%)@7y2rs_ z@O$>Jzw1bGRbqN(a=A6j)zpBx#k!l0tgNo#!obZPLdkbxf!y`x*YCq(T#T5^7N^k$ z4L=^9b8{9HviXs|l9}>|kWmfO*5uxYiwHl1>|6HMCs?k${F8;C-J7_8&ay2mRm|b? z;#zr^E!r|zXTG)#UtLYaO8tXsb$I_xVN1u(Kgmm+2NJiYjGW;Y|s<||X>IX>1=e#AFSQx8-$%7jm? zm&>G)U*y;{n{C6P+v`CCd&EG0zfJiF_8_@^}nfA~#cMGUxp_cCT! zN?r*kPt$wKK#ifAbi)d)Nd`lXv6jJ4UODLYh$fTO$UWgio+HI2aBigp6~o5O7oRCa z{`Y1Nu!qB2V8*v#qF7P35!yBbbSMaAVE1moyu&mTF%I`ah5c*K@_AAKPE zW$(Bn_UV@T7AQ2IEV+sam&UBHosT|&{JKMd!r4rg27uZ;(?a>AziDQsE4&fJl{jxX z9*273#KmE@SxIc)dWURR}ccnn@a$khMsWhB7BquG1_vER&^p@UP)y4$HcmE{o za$W{+9O_fVHNm8DgY|#05eTZ%WH}4|Zfrg1mPoI5gv|q3`WveIlaDQix&kRtMtW}o^XN8ntrS84Y}zN z{jiA%le{J|OPc0m3u}uPXcyw8 zV|^9qdj$OX1N)ab9^OwLrf;n;(PEM>0GGTH=Xj&|Y%KjO>eF^GJGb~$3F(!-s6h&o z^e~~w=0`Vl3S=YAkoyCrOyya&#Adi)Qg|LE+fnj3$&Y?&ZNd$CrLra!fnlsrE*81l zU86ZuBxPt4aGmW5?H~gI9XeOm?CE7rrF8dOXG@nlK9Bb>4;d((Gs_HJed=CmQRC}| zs28{zbk1?=@cpB9t{wh%@sHM=D14E;e73iFL0#e*jaDOa=LOyL(om{8gy#;ol&9SP z?IKrHax&=G9!xp}-QhHVq(6g)3<2A@DQCWLirG^j%BN#QPgGc@xc zB)^^Y!pekx_1j9lc;6dTyRu#p=}`T?B&Hh=J&gQGX+zrR&BXz5hNBJWEa$taNOfmM zzddu^y3XP)QEw+p(z9=0b2qM9Rw34_FFne~1bhvIypi7#nQdQ?izOl6y#3<~3L?Fr z{8K4gOL|6|vk=aAaK`2>=}|-jcR2eb?jMtZ5Xj}pBkGBG2AU9vRBSW4XrN5tmJ}?A z+4EVHVPiS4_^-vJ`fDb_#V`D&1E3AxP*hg_wTYX&+|=LRY#7d#yb-VUEzEFg+)w7vx4n zu(KlGa-10`ZfG>tf%*>dm@2}*VC-ncQRH+QFH`Bqpo+&2XsC(3b`99OmFyL}jxNY` zJdkkd;>O3zNL!&ytX-=v&b8@tgm>=(cb`a}J-^srV@pCo?XZ3r%FP8PgSfV8PL&eh znf~9vv-C=OB>+`a0CO>(R-xT=DSDS9;s|LnB@GQ@ZJ+XC}#&myQ9w?Ir*$52|kBZfrvq;GcoZQg%MX zZjvXCaTVnetD-A4azMnaR(X&!9&oJ@fTCjz^A=p*;qM7y>V~O9CL-CDB4MS#vi8;M z^{MHu44ib^gMsPg>h8Q5JP?@hwPCg4j97uOK^2lMxmksn*h+g{1T1Q0U zF1k;MknBpKpyPKFF&%GHDHh%~H@iP5z$UXwR0kds04T=hHzjPlq=geW9R09vSXpen ziTOP{lq3aq!_Adfh)^R6M|3GvubXD{OBYJr8R<}RG7!$+@2(6+wt<8KMXVW#B?gv- zrz3Kbdbbtk`5zlAr5WO(j>QQNglI%Vp?K2b-40W@?WMmKE2-WwEVEn}Hl-+w zD{LqXSuX!S;qtM>B%2-bJ6AfJ(W9S=&@-jRFizYXpq~$a4+GCKfi2cGg0@m>pJla! 
z+9lw`l$~i0Kk@_ zzmoP~G3NkHa|2oXFs5h&^NqnBA#U58O*&9@u=HxfG#5Iw>c}cyKPpQo3wp~XgsUtK z>3Ttp>N1Ip4D+-kJrJf8PL{}-nmtAY#zquD^n^KT$ zi-J?&0AM#a1DZ`CLoO~DXK$Ba0Z^|i03|^(n7Fm7=WzX{xEs%cbxXNWKd3rxDhrmC z7?3fuVfuVfs=z(gLLun^{ot+|9P+Z1&WT5kd@Ar%@P{>O#t~8Lk_|mcINA->MU#$XGfB)3gq}{reb;KQ%xDN zzci=^);v{jod!V;xWA7qK2=BD%JCQYRWBA3NhLe9LS}UxAT~?uI z`R&voORD2Se8rA0E^gIa=oNqauN#A(a=SQC+Ao0a6m8~4Q2yP#8tZlgsbOP_WEpnI zQTU2w^@$DZZ4%|hIHWB)z9f{Acnn>~pl>7u;>};08p>i*SV`4y!{8+YqLgx79}?L@ zg5VFsJQ|)DcKTB`YY=t@&BU_M&&whgn!jhatTBE@N}4yUhQNJacqRO1(4}5%KUiL# zM;j=e%bD(w=Vz*=@M~&}nDhs-vw^8;X1&bg$4o%G>vLz_nxiG=5Jms5O8L1T;aMeC zD?2OV82`^z^czS8J1u~iVNI+$HQbLrFwXQ%L95>v@gtyUB6E_jnFbx~au9wK?Oxqb zqqJ!qZ`vWPF#8I-efg4nS*#8wFvMk(8$zf0A=Tdd-kB`ESpz{GSnD1EhD?%U7VkF z$!*w&CVSVQX?vI_Ehn9$U!c7dI+@5bJtW}$`SdS}@TbbeZm2+fv^Z{+%ExqGE)Ujl zz&Q^OX*ezoEprXMWkGZXvJ1+;hD`YYZgDJ`9Gr|>>slWf6>XRo5|g14^jMp^6;#SG zex!dM;E9k12m+IK17OY%o*WKXGN;VW@qg^GBUK`LLK4-JaMls_ooc<;cizrQHpjeNfJ9^em5fVV*Z$(bnA)@`}Q zt>NKgcMeMRG zLdz&s{gZzywc)RGi6Wv9xxF;8ernfV9@|8Qt64`#!?5QMZo!*0j6RE5*l%NMkdoY*04HM#<^Dm(7tRF@I|= z7vFPAcb65FG-svBw=lLAXbNJRk~^6EO|>n_1*~1>)h-O-r$jWM|830O5?4Z;q4t1pLbt?M5iK?jg{2S6S?=S<^ z8XvGQ(HKBmV*)BAM5ItX z@$XV^*G@XV=N@IeZKQ6h!;j%ckT%RFTU$0IAWQj**W^3r3iEN}#a^;shQt|}j*qjO zasuqeX^!f?%CP%q9-nU*)t+VUbC35BHYFxr!xtf~2r1jP%Qqy4RT)_E0jB!1r;S0Lxx`I0V1uqr}Kk=-;LYuALF`l?QRIm0p^K&q<9>e)fV2Q+LWk zsMifj#unuI@LR($@d9j^Pi4pMM8i+3-1q|MO1uGe89uyljLfXLF1;ErPWC!(7np_u z#X_oBx&I8o7yH3-5KIV*egac|Oz8&QR{3=~4AE;1>p&YyDafLPstVm`H|p6AwdPZb zzh<&|kNF`;s!HZ;9V91SH8m&@@Wgf6v@SZ_I~}NqXqdvu9*vsmQC6*5(kS^}bx=KB z)(=ftwlt?8Z{r)(Xq_st$F3BFHUDOdtVgo=QELF>45ZPrSbO36T#)iz>19=gSBNlG z%6BXAg0G%l2%?9peV7dX`U2yIl4L8q9$r#ltg7yxO7Yc_4nL7L$g0HOzkKSy@;rP{ET-6IVc5=? zOpkmQ9LL`??TVjqN+pPDoIJbB8zJ0L_+oT^rT{w1iP-+MQc8Rt7QFD3I?YZ^9C(Vy z$WK8g-$P#6T+TVr!i|A#~y({eUUa=P5(ALO6BIZ&aKxU zSZO9QnQ8+j;u8cmzVhtOnrPd<5sIsHxjdK2OhI3IDDr?^9BrA=>IrzPU(3@Qy%B8e z6G`EDNuvheuH+5hBpzL7ATkXV8elTp=UY(-KBZ?U$#qy&Z-C;ex%mmFBHLp*K#5gq z*N0?cjgR70IUi2^oYa!0En(QNN50u#LsnFZV*hyy-jkdmQPa=pM%ArGB@V7WtR|C2 zqtga)m7P8NjMLLup1-q!gRKxCcdx9)LyoN~WU#z3uTk~$PwLov(-KkBYl8`s zq|TMK`O@08Zdd-!BFN6!3%j|fJJTgbd7@r$4#7OXz~&G5aR~q1xkr9|7d*i9UJ?X$CnykkjixUM=x1x$}{w)NUhaB?zCOnNUjT!CJ z{&S?&k&$|M_~JV}P_wF>)c(q(SbZzLj6T7c-BqGr+9%A53BkNqUKYWxoOBvs_`ikO!7_0qcf2xnYTT`^HV}O}Loo>-|vo#N#ts=HipuAn6n3 z@bw4;VoSDdZv4i~ft0XH^Y!V-50;?>unX+pG-h zgLf)3blOjSh{wuLR@9m{M+1SRd-vV@qu)HUBI|FZn$O0<-$6lfdRBIcVKwT{=zsG! 
zXS`p1$95^|ncNJdh~JvZu*1IO#=KBv9zjT(`)14Js~gNe_$2r861$tU?mAp^hRGcl z$Dy{fdTwz+iRT9R=LV+GK`o`1-NzT}T zOrcC7{(H~v$aO_?cwEHF`c_Q7w9x)iqNy$G^9D)OE_2vBjOtHP z+s*l}${*gmB}UWO^>^-SZhJh)nT+QNv+(U4e&~Y_22VH7o*oDc2XQCGdEUTsVaV`- zK(sgDId-hAgy{XkEb4;thSK!0Z&UsUgVWv@mctwcKDDeh296q_WE%N5BWCwkfFd0F z$FZgqm@4t~m&aX%gX_a~hI@Zs@>J?7DTVU$$%c{(4T@SO`!xfuV%DP4H9`)cQx#!u zz4=NqEufqA%&}{IFh!A3V0Kb6$TsY)V@RD+#SFJq+Z!7|QkqZ;iB2b-qWnvEu#<4qk?+_D?_QB8;tJUlw$TZ<2f=4(;yy!3?F76EmQCeF42MCNw8B%{nM_I1CuR`>Ajp58*z4^HrdqZ8V>Z zZf2v|X%WwHm@p4e6sT0NkTeJTfh861ulwk@R1g8KUK4E(dgas$5{`A=7!siJpM)GG z^=C$&RVvajsN~+wc-BOnQHgWn&*8+hUeC^pIL2dS_JBk{m4*C`G9m2!@Oc1o=T83z zih{yv2QtAI`cnA*ts!>jdH8k*+rQb~xI534lViH>J)K$S1%nAtZYsWm(-X>Fm%A3` z5zHfFyO)86zNNs4T>inGy1Zs@i9#$HCLm$i10yjVZeiy|JYtU*WGW97@0bS%qwZPw z;X5fKu~{dQx3lVr7QXn6nvnYgJ1o={H(}D%pn;sU*IoJE=k#a98=lPEs+@2bMUv3X z*o=S9QLUUKc-|IfV_-TM25m8eAc<=?3>oQpv2Vg{X;eGdH&cK#rM%&ms&9R?E58Og z%6s7=l$_Mdccf?>r+Yz4b&m*Wdd7*Ug(PWjaK_Z=F&}9q_xLkU_zX=#{)sDGa68T$ zRhq*?dwWeik{KUdgIRKk7I7N$DYhs&Y^kkSRq=aCa*}6Sq6_R@6Zd|?l}|J?QnMSWuaiY_q36zt`s%!Gb5a$Vyg0h4RTIVH{(CaEN~*Fm!R(7W2YTsDI(PzKzAQ{0wqI zT>e}6#hklV4oF`b0GQLuj2r=U8KB1?Qmu3?AfrLc?)YeW!KK)ACNn9{s^W9h zQkpYT*EmI?f{vDTcy^0S#9c1Qw+okRLsrdFjz0?6bS6JLB|b{R*;J|-f7uqPm8vG` zRxgw2YEb5xdZbiOHtJePw@Y*-AW4dmnM7PJc{5_9=`*zzSqXaKHtJ|}q3c;H-2~_a zpksjECeb~Bt_Som2od|UF6DrL*l=BrqSPpgJEfLZ-csaemZQQ+iC%1qGMqZszFF+2 zFXKa&97Y7P=u0Op-A||#0=CSkWKbN;Nswl7x|0#X^*BOjah(EOt+>wv=%pr^F8y^; zAme9QE=8c&s1bo!k|DITX*C0<&*b_uTsBk?)uWa8i3)SP$r2!aCd-rRpuh%2gBHu9 zJx=SB6lSN#Vesq3s2GxRBCi7jY3Ae5XHBrc2MPpq5m4643)jU-W3`k6IlYUuYD7u_ z&}mnfrdTO@zD3HJ1}JY>(~}JKHq{pD^aP;7ilr)i)=@sYK!Q`z##`@M6$2oEkNp>y z95B?&Qh!EdoG$=>X1V#%OWBd#GM|FSXZ;QUg2BSL8`Zj-@mLdpf&l@@ur;d^gEymb+8(M|4ZCpTDE}kf&F8q9?d>jkB61-E;0bF9wuPgzj>C zo8ZZy`a7!iDqHKB?(_d{^1)c^ec~SVj92O<^=VP@1oN*d3VxlYMY&F|)oit8W`3)< z>&~w_#BAy#e9FPzPv3uRKM7PTC?Txfu^0URp#u~bCdn$(ht zTpBp7_Wswl+BjEx=FgoXAe9_<^|8dM`+8F*=chCmqT@dk3@s#@)4b$&ajF1ZGYBOo zaUWHJx2-L58bAd<)fDwL{;?t%`E?S5er_3$nM{l4W$mg(zV&QcJZj2AxGZ^cDx1~; z{i+zcDe#1IEDQ_h^5$bn*4$%RD(SqZVu}G9oX>(nnUPSHL@U%WJW2OYZpK&bzCN&9ZpUow9bncCC)2jrKcFMkB4n z%=^?U3dqY?vY(O6;wsA)cuK|xHE%<{M1_lWU|1Z;ArMat@5wk30=%Z8=Y$ib8h&fp zEYhf|9Trk;DH})sCFvrh8syOH0_|#?^*iR#82!*mE20JbB0l+0Bynv)pOjXp(W2qf zP`X97GnRJ`*zsV7ZG3pgevbw)@fd5~fGfU4$`$EEE5GVL$PWU)D19$z4Y!4c#XNJ=UcH4QBtJsQKv z#4MbJRfI@UqQ$U@O|$>44so1Z;w4CwBw317Y0|lQc==_@k}XHB1@h!8P^d_;5&=P_ zLduk@P^n6_8nqe&;oY=bW^A?2UXT0GQOCl;Z+F8bMH>IyaMN|S!zYt0vdJNrJn|`^ zFqq>`IPHv+PAxCF(`g^}*t1(l;}UN0CCzxcy}a!6ixxE&euA+iC$IEc>tG|Ce|}L@ zOCwZq9V))g3tn&U`1+xH1D)NAdpO0{IyuE>{)i(zNyvMTSC9P|f$ztU(r-VXbnh7W zyRRC6w2b?{=`v-K?fG3*t*BVA`^k9N1Q6$#hv+W2xexpR4)|YGXzkI8qswcr=J2RB z!m}nYr32#QnqT$#1?SBP;NTs9D6JuV^;112HXy(Cp8kEbvFSyv=~t>{30T_$Kmo+O diff --git a/fonts/KaTeX_Main-Italic.woff2 b/fonts/KaTeX_Main-Italic.woff2 deleted file mode 100644 index b50920e138807f385d0b0359f4f0f09891f18406..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 16988 zcmV(>K-j-`Pew8T0RR91076^<4gdfE0E4su073x(0RR9100000000000000000000 z00006U;u(d2wDl83=s$lfzV`upmYH?0we>33=4t?00bZfh;j#m7Yuo}gkBMuFlG6J5B*sHHKd(*=umo3RRA1q&Aq{Qq;*?z?Zs zS6lWBvpA{|4kRGzglV7W)AM`dl?u#krjN&WNtdj+pK9tmbDj6g11qm=IR>q4=|=`? 
zti%rTtj4WAvC1G_rIr^=2^+WshA@nFohl_hT*y>e+7AVqh%8x7!MALuOl3;G|JvJS zZ2pf6{GYmVua&&rfSf~>Q|VHyoWtv{ooO}gpZNn4!G}Ns2Wky}~; z-+Rx%Qf?d6zTgLFWNq{L)|&XtUDJ@rBvM+z<#qC}{~v8;7xR!-65^qpmB9aR)86*I z(Fb`#+6{RXz>gL8A*j+OT~ahDXWkvbdrxCqZx*DH?W|_}L8Ap}LZi^ z0IlqWBQJkKu7V{2mMO|~b$%JDQZ#*va?6C3FLd5So^>i7j8{2goP1iH=I;vx?RqZ+f%D!E1Q}Uf z{0KzZ#6dL*1rA#A#nlOe2*^SaCA87WYSuH!F-~xf7kHOX_>w>4Ow>GI^i|*Yqu$(o zy|edpvIl#l$1ki=Wz?DEGei2WNuq=@I_Sp34KMx$U-n%;?B1Oo?y(DFR2sE^JKR2X z-8G;1*ayx#?E#1FbCY3f%;g&TKkL8!pWgZVe&=t0G8VL%TMb-GT|7;&|I;&j zkM`FvLW;i-j(9}~p?4@p##%xxg#6NNA;2G8NdOw#s3Z)rVoM@GbAqhjDO`sP5rWI` zddKCYp`S4K#-PLrvlAPlH{%u_3>X|uvq!cmzm;uF_#UBueexp|=;6wEg#<-aPj zO2>wF3fYv914sg zp$!>z%#4E66NKBGCU$09PCSu}|0gCgJH|;w%eD_&Chn*gwF-LfJu|~jXh6f26o5i5 zv=E$ZMC1zH2?(VfMZ%L2!B2vMv)L2^K6_*wUZT#}mw<#y zTcCP5%QzGnTzj6hJM<`XN2wET4&g$%Jpob0t-?9S17aH!^vo`#aofV)Go>6J8R8Zm zNFf2FlwhQi5Tuq+q>(VBm2jk!1V}Ft$RG)O(;y-=CEix|yr3fZoHGY4ncikgezV^v z&Dhem+25PYh=OYd+egsPPDGUiiA~su&DgL78@6J@N!YLr8&1ZC?bvV%Hk^vh&IIbb zMOpcQ%7%^xp@$fX^vESsxkHx!*` z8PkoPf1#mrca-J;XlDa&{qM;^p%zk!O@j2Oa-#+Dr;zq^zsiT4tz5uwl3bw1AczDZ zzuk*U=ApV*m(1^wCg8AZU;#2L{1hrR30daMp37-`;FlBOkIdRT&|RCaVB_{Yt6Oig zA|hGb64DR0Ku%f~);#TPQv;Nt5n_yusik-{%))wC)-f3cBRNI-@q?L75&Lhq3=ygJ zHDJp`QK_?#k|Y)}E8Es2T81J8Me@*kIve5cTC$iCirP4=sD#uX8n!GkC;~8+9 zc9a5OOd8*czk`^sP>VH@6N7g;+AfZVSF`*cjF!rZB_EQEdFFSNJwSrcm4$b6%8opo zXvYIV#if$1T0y^McGRQRDm#>2h&;LXd3Vg#!hHx;yS>VOurT}04S+?Nj4LU${h0DQ zD4{efI>u1YfcPSf75)>El0}OExlJpmQOO4qcL-TD3fFONXZCN!pp;2qWHo!)T0R(C zG~=v#izt_SQQ^)Ft$4~h&dQF2R1yhXjd7D-w9q_{-m3aTsZUF7aD6c&urUlf>Bb_X ze^7HG;!7xiehPCYT8nudXHB8*?l189t@>n0~k5)@!|=BAippP zplJt~MfMzQ;DzI*fma55O-#_6u@TV#NM}<(DohV0rU9_d;k+YYeqJPW05NhTH576H zDIGwK{I$i5iqm*>+n1Rs4YJ#e{jA8{*82y5vJ1i~ko!X=*mzljpCu#jie z1<%8NmGYRSJY^}*S<1^&dM(gf!SfDR86R23XO{7mWqdECp91|BxFq`zr;gvhJ?-;{U?B*Z z4Z#qHcQ1Sa31vZA4qiwVYhxt^5N%)GEmGIal1(-4o$PUW>&S}Umx6InD){m5;8B#5 z==BSTLIuUFlk4@yXqthNP@Kv&e^zBp4j)Kn*#cT3kr`rS6LJc z)s=K~)i&A0Qc9A%TjpT+MFEP+l+uNR$})y3(km#Q)=DUejpMv5!LzvyDQQ`WK*wB( zWJ4!Qs`MI-UT?Ge$sV_3kv(dT_za4xDG(N`BCyc+A$=}b1I-}IgtK{n7Gn*xfI_L3 zNdmaU5Jm;qQ2V#1CMHhgK#2sZW*Ww_y7MwE~SKKEVnJI8Ww; znjb!eLwzJTZyZxWxFqgs%z9QNU&UCXGWi%Z5t)O8Q7CA7;V*x2X@GzKJFXoQ?#okB zYN;mQ3Wh!~v{_uzD3yR0g)$+y?<1}HbzVXAfrKzy!UzXuVL#zxm!qn_hMJF6Pnl2C zWm2r-n}N>Z{^PX6NPJlB{^*bjVrWemY`lpPGuxe$q$CQc!soke)SQK2htF3_%SI|; zn3A4|T>#AVR@=W1I?{+V3@6Pr1xLDI3jdNyE#k!zv&n9=Pqv4|zNkB_as*j}S{WFWVj27}?Uoq5_GUyfl@>s_i3333Q$g(#pRCdm}jY~Pb(!!8lh4c!(ZF8nFP;8Ng@P7I_q-Ss^i!zr*bYe_~-*Q5tk z0W=4Ot^I&-u@pu$ph|5KiH5q5Tp$x65Y$PMwchEbTzLgF(9O1!)gycS^Mtk$EPhJZ z6mdCS& zm=bOoVVI_~*z?)u3X(_`CNY3dp;5vcCi`l=v6_d{WKCO4-3EiD7|gKqS$Q@BEfoFT z2%4!aGXYYljWUSeLJx&BA*^Gj$p!gDw~z@XLpDU4YQ1M8x~w#qi$pnm)WFPoxEpJI zjYPy|F~f2~oNe!7tiDDcg2G0`sFAaq-tZGzDi!|rrke<5jghzSDfEQ{bg%;m<6A*_ zO*V>8!30%mfsGQ+xb`L^%p^aMK^}Fcg4|q~f5=j?k+9fG!ZHOe1ry`WE>1p+Y$yG{ zKyGViW8u51|3$HUlCQ=ym4%8#J?!uIB7^#%ECceKCW!4Mni#H>q3)#MM{oe=er;XN zi7p1eLHLuzKoZu7(B+}JQ}l6gL87nxa*~3qB;2DlQrX)8Sw=Y^mkCO=400?>Z^h%J zQQQaFr_Io*kQ5XN9D1Hi(NL_rwYf)}w50n{8^wowkkZHp1<2}ePc8FZyq1A6FPHs) z>5Y| zOhwWFb?E03?7JUsxSywBb-h2ohNxl$yZq8*>AbbZQ%Do?(nQZxi){Azd?5k_RuCG@ zJd_t;toAhjapE3ALbr=GvD?kuFj}Jo#i<#MdMwPq-K=G{cNM`vxuB@ucxDTE$rE8y zBWtURlAc8@r+pvaAlnsZQ95sLmvq4v@lxzebAQyHA@>)@B{6|6uuY_TwG4RK4}#c< zV}U|i;i5Fgsu;X!1+ia!)2$>jNV!LMyG94CG|1pU-0mKo;;CjZEY)dBDA<0IRDQH8 zJ1^;{h9O3+4v?4B=Tbfrk|0bwJm}WSIdLBuP z4}c=2^8m=LPia-5c_hC2hIhl3F1P@;`22sL&&2;L$v=>tJJR131;fPc_=|~;Oc2n+ zK4H}N$4-Tf2E!)U1^RjKln;TVO=7ICOAU9nH2R~OkNizE414K<<2WVf^SA(X%Z^d0 zrHswC@7NcPVy7rk>^LFRVgO6QdXHptyM?4Oy(5w-I9_H^kB}#+`ER46swU%=myOVs 
zX_#gRD=##!N;5O*0m>JVb7m~al0I7LaEOW^s*qYnJDZCjB?Q>=Auj5E%VPqsomB4; zOe)2ZA6RA(Lm}E7K4^k8ZKT7tPwsMU;&ry#)1;AP>)Vyqr_m3(Zgnols_GXe$a}@E z*(SMf5pM^@^m@oSTw8I@7jbG$CKgK`buz*r+zZWxlMO{wtwClawh`xaXhMm9;4wvL z8LD!Um)v4mY>CnN$oZiBZL(P}&c-Pi67b1v$SDFXb4q+n7%UMK-BM8`+|O9Ws=RSo z)2Hc<9-7Bz>X|SI(NC>Nzg9FGOzHWKC@-EMVVKXPVh|wLJkgKI!5>b6kiXj+&M@Hi zLCcUEF#VT(qcCSQ4Ckw#jE_2s^k|B-Z<_oDw^Etu3#d@bV81I>RS;hj8OR6{ ze&!MkQV6Zp8Z+^KL5HxkyGH**DXiTM%c(_jFQgZ3wmXa*)9L?qZF%E;n5MFHgi+1} zh60(WFk#!#PEijF8nsLozR4%7f(D*rV+kAQ&?$#*81C;=4ic%~ zY{z}7Wya0e-i7x(+m7WKFz9sPhq6MEem$_Vh4@_wM(_9hmn|5I4H%elfE1o{>!1ql z9T}`xW8)?+hN>9@$_RW7glTTMh2KrA{jtU8H||DM0T+q;7_*HeLHZ`p&$Ip}p#jva zrG@7`E70}2E!8LNRg5JDzs^270W$GaD2%``ES5hHZsM3Q>2-XIt?ZcD&m|H7RK%@# z&BSx(c7z6)>wUXM&RcSb(<$&11+6IM+*@Q`Nt z=fNCl9nCAyLnK<0sR3m?+Tn0unRJN+v$qjnd^>`+(ecP*B54m{XO=k}Tl-;KoHI4o zQ%MpF>o4*@vmspqbRSoH5ycJZ5_plc3SMDiIkOR~NI}q-N4JGUEG`U*WIQlS_I061 z*Qf=TO;J-am?i)le|x+{*t9KSd`eM2O~{rYm|3jMHR*21IkR%Ri0p+$w~vL>aklU7 zcOYRthz_w4-`tktH6CuL`bLPYCp(~a!Io?;9Ji4(=Nl#%nr#O zq%sM)EzGBt$albx;6$6v);tH$ySZcuLpFV@$Gpq<;`N1d(BpJ~8mVz@o1hU>*Ru}u zU+YYfx#8y$5&NbQs64Wq%lVF6uxD1g)9H;tcWK755GNbgNfJu1ar4O9WBp87F;YsL zu6T2zd5Gx5Ibny)ci#1cV6EyUmT=ouxW!K~(tGQn`Di}MStlr5NBRe9e0+EqC0KiW zIgL=|x{a*w=U!z5ZjhsbeiD0mdSa~Jxh^%#LSvvaq*6LMC`E?**JI0(00U47!RX+oxB;Pp#FnIo}hyI zx#D@6^+kjo`3d1YQZf37YPDoSf7)wF&kSrxvF^QBCzlI!k(L-3ubX!0c5c+m8Z9j* z1f~^HX8ZSRPK=41W=O8ly$QN+qOUO<*`A(k%4=iKHo!U&>FQ+s6S}dF{~O_UqV^g*40Z^~E-_9ncFKgXFlvjoqcD zM8VQVE+q#@Vn7T}#D&C=v*6F_3D9ngb6udG$m6L@(+jQDTLWW|Ae;2)zY*Vm~#%|ApE!2^5 z2Za=xhHCVAzCzjhJHs=9dLSCxYG~Rmc;#)aJcMX(nBg4zqNA(zQVtUqpLF zX*2H@6E4&Xb_&M1)IEnWJ9!O4%G)4ae?NskC^uWIuwU&)>j&~3+w7of)=LbJNvj!= zaa;JJ6G}cy9!u-Zt>)sPq#!ZXsXT{Sph@C9_tq>jX^4oJB_^_055b}v4^mWV^}`qz z$r(Dk_j?iY6_zt9(_Ir<+oP1*EY>+nM{^?eozL?T#M|Ufek=L9HoqQee-XjzRQ{`? 
zgr%828U129Trd;QC#xeW$n^5jVCH!V&r#6-?AkN_DB`2N8PjdOekfKM*%nk}Xw0g<00!xi68(;S`l|-<= zzo#FoImC1FlCBCn&NH*b^U@@A5y?n5!RV$loIcwTChg@FdbqG zCD`qX$PB{>f|?4(C9qy8kCW7(PNhXYj%h6s0mL{XZ7vAXbU&k&pbdO^gO-wYu++)0 zmmKMj{d4$TCQu(U`CpQeD;_7235QN)%D50d)nE2^zWH?2oy!c12zSi0FZp0Eiv!)f zhE|*4O#=$MvL$(gJX}_6y?9^sROCySfR6|rK2gWI(?^+Nvugp-ppvR3l z@cnFohB^^-5kQorM+kDh}%64gs)d#H*+jUS3F_c_n>h}J-qnced#N8idT5` zM>_62At+WH{$okvyE7?PxRNr zN!3YVFgsy-L@GIBTD+*{p2+^Vka&_nyqjiB!9g&5WFkNa-d_A3$y%fi}whS?v!KfJ-pJ`-7{=I|Yn#ddZ}Z8h}ehmReGzyAZCX!&GNrCk4O zPH>j8t4Hdsc->JC3tkZ-fUDh9wU+YZ#N!0aS=AxV3-&?|_kCZ{b;&iEvjSYVoUB(R z`?E<5ud3a=qapD6p=VxRQN~25fS#~^G&UvrV#S!Zlv-nu;;AX2+$zsD{!de(CbZ4u zaW6}l8`n0c;>PT@sVCo^F=e)$`E8cPpIjqdoThYYK)Dl8^( zs>s8Axp3%8m5dDZJ}CU!>aVOUDq=u2pz4xKusykwVJs=Z(=L{#b^nBe^)Ru^ek8e*E5*1`t&1LuYPT8z(q4+-fED` z^>Ai}J0O)EkrC0l8bnfgM=)`Lg2f+-K-OMnZGD44tyMD>?OTI}^;2c;5dND5MH?QG zz@`7&;mxDY!^*?X@vR8#7a=WT;=B+y4jV^CM@?s>;xnf4anqRTCj9iuY(K4GI!Z&= zqM}cUW7>Omr4<3#^tnWFl-K5sg57w{-w6bLie@J}7Q5UC*3_K9@8ZrYbdTw|S9skk zc;JgXF+{zv`Prv(n&{V+|NKAC_}%+%e%Pa#XFuqVxjhy1a@81mDDS*_G`TUQWo_YC zZ|5f6ZIEFPO~2~CVn38_cyEP=)wzFv*Y%oV-7*{T$G5ClwgEN5;{k0>#VX)LW#pbP zBIr5@nVVs9Fd(K|fY}rWW-;6kICTNr)xZ1_SoRqHPMzv!HKCYPH;h3)G$aQbXH_X% zkLOO$D?L{7lXn%sO>H5mf$^NZJXsVFD*|x3B9?W|spv!>>^mit4t>AB2veZ(q0b*?Tx>u>b_GE=}LRs$(@rvE= zdnymV^>str_VrCfmn_$p`w+%9mRNl1AD1A$_iQ=u{lwHhqjv77hj0>>;r|{o-4TFS z95_SQKcu{!+OtUe5hMdAEE3O4`s2nxqx=Jt#28IL+8nnT@a zTI!vCF5X|5=k?v9Qzo|W?;sH`RuC*N?ea5mN@Z0b0@tfa_+^piZLWn1SPe%tl zUI~6lpGpEtfcjqLc>B6_0gMghl~yJN!>P)4sV~1(Fy$*udazr|2rCR3_b#3lDyR^M zwH^g(wVNp=9kf5AzpN9SOezi)o@579MuFb`l7L9R__fONL$cMT^@#Me381y=W}j(dgEeK3%drDg9p`}kwL{(gOC zG2g~Si^^Bg&dqC9Bgp?VakCU!8N0d&$8duG+G2K=x3tBw`I`6L%HlkvKIF7mh;JXF z`bf0w-_V>V{)sw&&M67xE1UE$j>SEnBzUbt&d0yMi{r>RBAWRBtVQ##q4-Xyd%o_I z7k3;AYd@Ek$aVV@-knYiR#DX+9x&5mhxR8$vkK9$Qf^{)KWj_NLwT z;YfX8;h~q4b)U71+HHGP`~*U5_Re(;$!BMFu39PSB8(;>wX`|_L%F)^c!R8(2Z2*ly{*%9YDrT3Z z%n?m}A1-Vyo73J58!J42Pj@v45}Ri)Eg3AD z)0%%aDBgG)>TKP~vpBH(!Qdn%$FWjlj)3fQW{v7QMb&O;Fi`&v;IC<~ajtDD?#L%f z5-2&Ct#{0>FmE-F1r-vfb<9um4e$9uP{=Fx2{4ow(tut#hBrDU&+mDAG9% zs@*0Wk3&o=WHLq|xr}omV#-Wi+Blk(mbmfVncF9TQ6W~Y%sJ8k?`Gwu2$-^24I2y_ z9lL)^+;ShRf?0f#K;DNTr8CUXrw9pb(xjRFTfW1v-mpgY3~Xlhkv!sEtvby!&8Q%2kSA{n)5Nc#hi3y2fZbl!)jDIn%L0oULa#?h?exHPRJ=aLmc zr>W=m%bB!D7*it?ArH8+ItV24+f2;gONzuSg(Pxc~H*1aywRJnMKG zhFH9jNkWDhI6BMgGz!@`P<0H8)@%%X1Pn$-j9W~b3HW$^U80RrH=edglB!U|yP1oW z54TlZn>5u6D*s6`?>=4MOpm9bg8k2=@VQ93-(keqcA)M&DYn_6UAoBVuC4(1g(adW zJB-qq4j)N9-Kh*fGI4n-%<+I9p%=9!t@_-a)K&LQ7h4$0ciB2j>@BdyzQkjmiQDAf zbNO%C+TJGq1W?pMv=j)H!_`x`Sm=k=v2sh;0S;_k(_fpb0I~*>uUwt1QnDN<+|FxD z1YC0x8+oTC?gX8YS#@@ESIIGTIe31O3BktVxa8>yIt(#Vj!rKNi8Iw$4~ZPSih%To z#E9?YMh?@)Wk1TD$LE!qx>RitM+xZbD=~TU@X~yEn*&BYfj&R&Z#J})^qZPtr0HLX zQBR%6?*ohnl1qik1k3ya=We2~8IML+m&puVR%Ab2KOWf%-3*-0 z3!Jw_XS{BTBgW!*b47%uPEJFBDH(W*^q$DREH-#a5tddQ7mwtM9E9k^HJI@E&myFw zsGu{c%2sX!JWnOuyT+fYx^ut`*8YJQ_A(ru1$cx3Cd7ejo|5P;H%a=p_gAPY&565@ zbsK)n>XWBxDLp!j$9GJIL zK`ID)gI&J`E|Q_g1vGX)aTR|(z0=BHjKu^J-Q{MeG zb-IYie+PZuBPk2#=CR-XFD)Xwuaz1`j2nZnK~Ap&XBvUBZ9<)4T{IL~B$=e`<~V;I z6Q*n40=u=vxzm^EHW`m-pu{p0Pg zQE`bN|8ujMBn0&gDnRpfBZK)Z-6fj4LR;+ffACN;b0g_%>c355ojtvk+WLgsN*YmE zLLdcSF_w!5%__%FJ`!Ls-z#;Ahu5G065!T%AjC--%_JjqZ!Jz9;&L)PUJJD?1BK0r zAY{)~4?VF$-w!G2llBETa?;p!_(FgW(gFmj&*({OF?8JS##eFmiTM$w8}HkTuE+I_ z)MHPp=YIfu*z8tk=;|JI6zNx6X#qGk8Y`|?KDa1VGNkWgQrzOF$IZVzfNN1O^9GwL#0SkLk?9=RpzZla% z;=vs~>+&XvZ?BOd;A{yF2S;2TFoMgsZIaAgApN;Ko4iC|XOF1xVxHR@jdN5SqTffq zT+@2&Yu{=eNU-EG0jgXM^1IYL?M@@5!ljpXWA~Y>xbz@ID5<05va8?Z^vVH)Xw7oD zIqENti+l1Hz{0V*Ot%TY71&a{1+Pc1Bzi3jo2mZQJxhyh88@YGFpphQlf=zUyr)pS 
zTO=_WVbPd3Ej~FRu=8-)d3f|5%UprDWJ+wK(_tmTk|q?9SHP;Alg1H&GGV3m4E$~1 zaBFtn{@h9T)=RovINk3wo`9+~HIQ7&(pjak6UfuXcX3erIdp1&Q$L+6P*SpJ^hqw` zKWE6v^31LRYu;{DCfpBZKgg`Qq_@Etj%?YL{Kc@S;+|G!V($bF$Mx__|73&xIBS%O z1StwQH-bxl;j5{^tjQaQIXTNO0Lnz|Y?oKqQ0kAE|$&c%UwU zSFV0r-EJHa>F9I`whRj@BtOiD2m4rSmxga!O8f~&p-ATvpfYqgrRPzGyV1V{~TQr zjgp@O+)UlE0qO}*@u6}C?^Tf>uNXuDpj{NRhq5uZ-z92+kQ0rW=os$?>y<^Td9gGfD<5yhA;`aw+>?r&jjG@GxZDC_@s-2b-O=hx&^Npq|fL1_gbAVVN&Aa$1~x!NjaieWMK{U&xnw)Z-xA9pg(&{E-~>xaF~T6x}~f&-0R&w~U(Kv{Z~X z1Ys7FeYx;fX=NtUDoEArP;P?L(_?&TS|TG8M!6g%zh=&}^CkqA-;6p`L&flcT5>6= zgc{)`UOhJU!~@9JZvg;Z$&C*Bz<2Hj4;*XXIrIMrd*+*@Ev1K7mW$ zzOB<)IOGI7LN0ro~l?#iZ?m zjr%Ko-Et-VO(SPfP_rq8m#5;A=Oz7OBehLj=7MN4fR-p?*)=ZO`k;+Q;pSiAD9MtH zamn-(7HLK(7sLo*6N{{9%k`p*rGw|P;)r0z*;_50AWCChGPUFR&n~+@TaxsvPs{Ru=ti9C=xPDpIG`89#8ZYOY~@ z^83YFBB;XDoI3m_uUY%N#dGgQRsZzGUz;z`iA|hz2g)`8z)De=iesurwJpUSnHT-F z;QpcAC!w+P6|$d2bBS(T`^3MxIynR5fFX0VgJ}WD5xnme_1HmE(nl7Nh8rtP-?&6+ z%L?(@5;Q|%;;HGQ|8Mv~2@(GbC;IheeH@EkOjNj&=B$2qV|ji}prO60efW3>bAvCB zv{h-!xq11|r24G-&zGv3HSMmLkywwzeHl$MA?pE;Q3jJCPhAq=KmctFT2QtnIA@M^M$wEx!wPaA}eKkaqv zP2;AU@?+4CCHxDNJ>%6CuL>GX*vtRwTysY#{(~XDe5;(wuqBl*Ypv+`V4cG7rIzZW zta8%m1lZVWmubzsA65Lv)B7qm+dPix*BUZDOwn9X=y3I7DJdrCFjEV`8JP|GcaUz& z?)bx-20Z{{j8C8beZ_mC!d^K=#TFiW_uAMsz1?D$TKAZ@LvTh$9LX$!*s0_!x=!vL zANmNF2n&D6w_g0Ua(=p;GZVqa(}6A1meluCFo~smZM!1q%n;)^Qfafn`K!Dt1<#~) zq&V@z3t|$)DT<0Fl)Zod!S~F0Jq6r%6dxI8t(mKJHo8u?EY-hh?-$8sK2MQ}4(Ow^ zQa3y0`i0fXZjvzXOu{6($i7i+brEs$&g_L;Y@P~x@*-Zl+$Yc^wox0W1QvhwbWN+(4P)qGadz`+}l(AiaYI_*}qMTcw19x}D0Va2VKxaUEgJ?BbR zrren>TAZo#yn%x_#lp~%(C)l;_(wzO<(xU$NvXZ0!VEA&dv|K=ye}O=?`V`^-;rTY zS<-FRy@jpdfuri0wTXaz#UfOw7tH-n{wa5v68bc@pYS*|27`wd+920ATj^pRg(xq=L>AQkENA3KgC@tNvH zEGnu05^`;J3N=SR#F1vz9lF%8ZmW)c?7AwoT76^r1j-)c49^n}ziNHc$P6Exj*!I} zygX@od1K6xn)T>aqdHA9zKeJZ&lReTF}|$i!3@jjxe+~%VBE7CCnS#2la5{{p`ej!ox^2JSCeoc4s&h8{ZqC7V?}2Pu)D^@Lrp+Y$&+v7+ z75AX3f+W+ZX)LKE-xfcnR(&kQ@UjIQ|K&R#n_;bf9gLez`9H@+fk&Xf`Hla54NVzee@AXUAcvPP&+Gal;mTf@J|JJiDAFeZ z3Ph24=9^KEGyL#d>P?<%1f-`^Ms8*XpypG}h5zZZcgqkv3z4vCq_@0LIIF$b{|xr! 
zqe`q|ZeM9~*s6S(*A(g2`T%nKtDJD}4_t#+&W=8128%M1((ao6nN*o)(Sm@lTvT>Fb9yQAA(Mp zZCD0ewHc14J2Y~Iv{PZUN~c(GA`jND{`WgL_i3==?Kd(Ke+`L0Dh)A(k}6&&cophb6_6>*2<$v#__QsJQ%|CmZM$YG$@z~946W&%=lNeC@=LkvzQiPNdnswNsem&cZD$#BZL+I4D{kR8ZU?T4_-%&2Y@gG ze?NhYo)cwfKmFcRi1GSJI@`hxD5Z<8YIz~70SbhL z%!mV#27yLhbtQ5#(j9SW-lX7L{978p%Rd;rcsK>)F?ctOcXiGx{Fgi7#Fj-UfJ$ga z5y}d85u_=a+anR6zr6Ao)U)h{w^4%jGp@eCKDPK86ohPdaSY4Tiy?UPD1uBtEJNi2 zXj9Ep(~#MiKwwmXctpm3}Jg`{!=Zjo6qzNh@*j@z$-jR#GvIcyuV@Djo{QyNN3@g8Y zL1#&j%^BNQkDORI8zxtnAOzTUZP`6OA6i(Byzu?w34LQ~RPMmhrYZZ9nk3SMVYlYN zX?k3(=m+}2%hImhRa4=8Ya%%ivak`K37^jz0Ck1(s$A;3!ks&DNI^*a8Z|N|NVF9*8!xvtBtmW&laSo{3W`aq52C{ zJ0UzCXN|$LqLHWIxyNw;Kz!1~FAfKelAxYkl#=$aa#qDzpVc6)(9{vC^gk}sL2LQo z2Ileu_al~Ws@!oLkO=4>NM4!z@J+0B&o^x`42NGa zNES+DOI`rrS0P1{%usyoriUcAQeqVOdLogyF+3badLFxS*?Km->E$syBn>k_lv zTRNgp!imG>dET6CMdnDxI+B;J5^E(_QlnBnloB0DT)Xye`+0K22dD$wJ7-$c415fMo*m34B;m48Rvbt3n9LTB)2R zmP^y+5G&GfXwa8u*R&P!gU(i#xRYrJfiZzXhuuCyNwDFL)lx=~my6(FU8P+d9PBAb z8565hK!eUU)dmYSFtUnV9Z9e>gM_)lKW?o1Sf4^p75OZ6-TKA}r7DYk#-@~bFs|B5 z(fL^_%VlE`bdjuS z3fB5knP7p_#P}+$aA}^^CL5%wA_Kur%FGZ!%jJlyM$BRfK$Ijw9U}x*V>m@%*#11D zkd6!BlEO%bq>@y161Xl0DcPlx9e|T81u3xr4k&3N5>V=no7J4T!u~R6G9`;hXoTKQ zS7U9+#k$W1O7pYq(q@sxxCPfNEXvqkN37B-hU$2NC#~3I5kQiNZw3xQFs%6z@y^h5 zWf+puQY%D&;)!0jMJYiLp$ulG$YEIl$t4801Gcwz)$(~>kz6ewm(L3p@dpcFo)7`{ zrV&gn3jz?eWslbRqrKcIFa9Is$k&{^uYEZaW3{fq(O##4AOeCR$W3vTS{iEY{}Hqp z&`NZ66My6CkgNf6mJIfIgG?U#tJ3*s;SGoK1b)RBmg2&P>oYS{^q$ z7n!fmvCw%T`pts`K!Za#Os|pR41%Dhx(J&Ynb}}GIXg$(!M9VLYMN95y%@y%vX>~# zmjIfJ{11kKJf8euroBrk#OUV1z)VNu$O=f)eUAg~z4yT`RwQ^&|F<-5o)^~=hHi*n;A4A$96(u& zz6T106j0hR3DPeTNbf1M#P-%Ug!q7F*$QAC*a{}`=vD}y|E*Bwpj%;lvCWS+ZY6Df zp#Q|mWcQ2wG`fIEz~R|2yIyCHq>JN9709?zrxh9nFf0eEDvGLz8A|2!(&v@c;kzcn zf4EaN&ZprZC$OM*A;Izny+@6(b_nHep5(q)OVVd`K?!y{?`q8aj-;f>QjS)i2dyFYrS!>kqBs}4GqHx?fK}?|FQH)>w~y5#C>4c) z(n^WMxURLFY4nL%>LqOI7zPpoce+JLmjkDL;Mgn9U?i&=Xx7mkO7Ux}anNNo1rf{i zuQGWS>*fYR9_nFbxInJ z#uoh|XEqfs9h?40SNOkmyE+ksM8qVdWaLN`8iU2*DJZF^X=v%_8JSsFC9z3nmm*b~ zbQv;b72AESi(9rFx$@*IP^d_;5~Vz{atew{$||aA>Kd9_+B&*=`UZwZ#wMm_<`$NS zz|c;cd~CM~TTR;U9VeVjp?6&m3NU~}ANbHm-t$QWfB-@u0%9NmQXm6zKmrOn<+Mkg z^@uas2$nAxaJ=~O!g$E5*Y6+D`MCLyLWh-i4-R(QPQ>evZ*Io=XD{oa1=%ve_1lg$szem2=a}pBF z({>1!YW6>)A>=45Iy@o?=U_`XF9_boBw^wWi5~%ZWLiFk5K!Q?g0XFX!t=lRfchkR z_c?-{3kuwtd~(P+Pka?%gva;py-f6~&*%sWg=MMdU_Lnd&V$AMVIMdYH~;_u7N@=P diff --git a/fonts/KaTeX_Main-Regular.woff2 b/fonts/KaTeX_Main-Regular.woff2 deleted file mode 100644 index eb24a7ba282b03d830fa6c63ee897d92a5188736..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 26272 zcmV)0K+eB+Pew8T0RR910A`>74gdfE0Mb|h0A@!30RR9100000000000000000000 z00006U;u_x2wDl83=s$lg4ZO1h%W&)0we>7bPI$&00bZfh>~Lg>lfqq!H9{pqisKVY-r;FZ|J_}3x%f#O2oVCoLIe_|K;jSrB#_|6tcF#nQYuiY zRK(X+)^(Nr)_--CzcH|L6YOKIgtS zV^e?n{KWzdGz>Uvr3ogO(O4za|Gv{cJ82%+Gi-Qo5zvVr0DLZxboS5QW$DVXQ;r?L zmIH039WJ0HEy6d@pqu?CAy_CO;Dwq|QLaaOJrjSrzwPh3%zqSH-@JXOXu3ou^maSn zD6Y9G97Z4w7UP0&7>6YQ{`#g?zwBT4E;k4aiG}91V;Mr|0QXGWtJ_n;Rp(_G-LZ7X zBgu&ZY&pQNp#j4J@h#fb%-g|!nDK9Z{#y17F$vj|Ow$cw^7Zx5lyr?)4bguwH}XpQ zh^e)Sc&Uh2jvmQxaQ?x06H|Yz6Aq_$_jY?{Yg@O_mO4~aKnjeqsU9vsh70XIBy6)b zDEZG{)L+!>A4obA0Y9^&d{=I z1rQNW-S`)HK@33?1Q_TF+)dX^5`^^cPky~Ft6Q`9TUr!UZBCSJl$f=3h(YRSXRjSf z|1Z&uk0Zv)$I=m0ewE+k>r|MjE&PC~R_Rj!|nOT6qEFfVQj7#Ym zT#(NMmbwG?5(z-e(xsRnh)SU3rz4djk$ndW^Y3v+-m1yqPKC2`3yQvS0RN8Pob@ zd;8b!bXHN=2_&HZ8t7F$c?Gy^Nih!q&MrSe2jI^R0kDYQI<#j9%){aPfS)?x`Q=&T ze;vONSt<60DE_GxGmtaG3@m-&0D!*R0D!`{Qih;{g+tkB+RXlPxk(?CPmP(j+F`GQ zj(Fb(uJ9QTdCD7m7S|H|w>SDl@6XB!CYp(vn%POFc7rMk#lR-EFj=&{{lr&x)zesW 
zo%Ggj?bnAoubcV=pc+-I%2cJQ&y*#GBe3Jl9S1IQ$j46|O^jh0a~NO=>)6F!u5gu~ z_(jcDPuwRQ3n#;e;bQnHHpB_(`}9-#Gv@EO>}~ZQzI_W&s53_1v-aUppUVH2i=Oh8 zUh8N5YF50z;;G)iid@mRvCYk9@@waPI-_&)9l3J4dyfH&BTol!q@AhsGk^3j+vQ90 z%O}UwV^UsNR`u6KTZH&&GeE;Z?ohz3NPHDm~^WFB$G|bQc{%3#t zH$VCDj~eXRv1#=-x$atBdbrr%&&ypOiNWIh<`>T%eDmOxlRj|5aql|hpab^VYmYTn zT5i}<3oX!VvTjXkj8-ZnUmm?$81vtj|1v0$zr1pCBzfUmiYZV@#p@p#Ym?$XdCBE4^S-Ac8B(w}LdoqS zW{SNqT+QhZn;21I>&bWg=z=wGxLwj{noRNmp)%vbIlS`JibX7HBJo@N->MG@^Rsy1pre=gd~{zgdtpy zn2n_Z+Sm>>R52!1rK&`UBA$BF7r=;I6;&lbvI-NX#p-VGC!c$0vW0^JY!88O1>p%H zDGt6c0`W*mKw2U)l8}|W*nrUgC57b6b`VsA56Kdl`^~*g$Dez)niYTfv>cY$x|!>Q z>G3*Y7tCXxITjL*q7X{rP!>i-JgO2XO&mKpn8??2YsiG;$qkT$&t(L+zLq z40TcUyY+XHJEaF;3U6AHmgU5rzW{T_OMSAk3Ts#3Q{}fUIH7`~80902Nxl5E?yOGI?4JPi3SJD(HQ~V!qEX=>C!sDHfKlD)RXhnK;z_jrBbge7wwh!-@4pFm_VvjVzjHy%f1I zr46__VjuTY9Z2x%YPmJ+3}kD28wJ42B&V_3;nbrKcK-s-hM>YE7bSIMO(_WI=rNA> zsQ3^VMNNd>0niYKOcAoO5(c{ipd;>e@gpFT=o#U60St^op_o9CC>A9$l&U1HEXk2~ z04&6zQiPnUgrV*L*oea|T%@ec)*)qGwjNubZNN6-7A`OX5%8%5oj6dP@hY_{ic7gA-L&R?^ME=QQtoyyBdiN-P$&opG?g=KBml07vkd* zUTfQfs%iHeN@>zlRDVFPtw=6=#zGKmEnltGSDw0CL*K1B!#q8-j^-x4YUAEYp65S^H&E4vkORn<)pBD;FR^%>Kd zRDt-5P{wP{7;-*i0IA&@F{6mG^AKYAxd+Si>-;U4})pIlVQG zF@uXIkQ*_YVfrFqqU?8*PRBGd>H_8v0dOZW;^kbUX(1JRfZ;^x|B)`UU~%cisy;j8` z9Mq=7g)VqrMa)i`jv|a6WoyK5m8vGIEj;L!kzzW4TBhy<%oB+Ggee0!2k_0bA)ELN z25&eu&w0+Psylo-vv~-ISRrnMl8SW+1P9F|{i8+`woj}t=L6PXmL%)x(w&6-lMWom zZ9O8Qq67y(gfVKf0^3Zyn>m$hn+0PrLLJ^h!wPYb9hrQd6fie(w|u2QiJKHBb(s-o znW8u7iL6WUY(DD6PAX?JNlxb=j+IKnZKW1Ma6jG65ys-J$dL|4`V2+>7{dP(lK8Az zHAiH(brn_HU8@J!7dj)P%>SgN`d#R_4t*jgJidVmxc zj}otq)`2S4#+h<4F)=pSXK@*vD9}`vB&SdsN54)ail`KuH z$E{0(c+#09wUL9k7-0Dven`ECk(qi|FPt{Ce;r>fiS@R8n#OZ>dSTsnBBB*?keR3A zTVYWDj+Up5*4+EFS)8RWaE1OS{(HJzGX_n57cq~@)>Bg%Am(ZOqYMw$)pjZyc~Bg~ zYXiHiY17y1@vYkK@t*jnsz zr`UQ=i6j#3U=TS}sfyzK5T%RU@aT>H6I>l@tMw+Cg{?i-vi|;nZJILrhPDXckS^{3 zy`Wv{B8(nPy11x+%cx)fC~R!354^)Jx9rvx5lb38GUyaBnGB25B_732qnFy3+LOW^ zB`9RsX2M=^+smS$K_bn`Q8mDmreayLj2T8A5>iVQf5sk<@mb~@JHj82N|svW!kL_4 z$`sM&BCAYAL7|V>8#4A>h9}jc+mkCXU_+rY!iJs}BGdb~Z4Zi;SFlFkPs6Z@uJ7R} zD%(p{%YxqC7KZhp;;LIa8Hj{xV)jtw&R#kKo&5UBmCH8m3nzHJ{RjIGui9$mp?!^8 zYcvzm1&?#YTCSM*e&SuZ-5@DY0_Sd-R9My4Ma#f^8l?<0a=<~Y^R}C&Bf8*s*HcHi zLw8wY{e~DC-~95jxoFw=lkx9#L~g@w+vLC#Y(@W%_d&$*k=qaxlW}e@g&<+{VnS3- zmttqEOTy_~nM{Jlup|r@>0sBY?)P-c5~ybEe}DyR4Nq zA4V*rw|CGu#H{A~NQLMPanLp~3-o=<9^=jNDd41-fV6DV+v4N?Mz&pr^Z6ukF+jSQ z`CIfUxhi2gP`7zZQ9s;!1jl|uNs8a2bQ%U)$F+pI)abWQzSVQVn0u|Lt>v@t=xrQX z*hRNxI%+xMpYlu%RZk*I38b(}bt0x6u2oan1AV>unzadQyX$e~90~A=9{V|mXlB{C za&|FH_++zvnnbtOeN@IbHuNeD&A7uf~*FDSy3;WfpSsD zw}^*&btbEnHcA3>YB?&C3sfUDhN!#((oH;40r=WRn+Q?1)S|IJCSg^%ByBdnHKcJ> zjZzF(=X4@S@Sua^3y+1Zf+nLxu*8I#XB^BuBLS~dzY3r_H5=4fPNU#1HRcW-VC!kL z{Ix76G)Pin%=$oDR#el;5Y;#+5R$;i21*JAV+3bE5NVkUdQdVpvKwYaz0uSaOb*EU z(2`!WzrPE46M(LWEOx$Tv?>E>c4JH;FCV_e(o25Dq&BP2>l9QdI%<9EkFj^71cN;Zg~_`Xs&ATcc$3?RsJ(YF)OoL3-jy(L zXluqq>#qSkoSczTNO2RLIsVi2=) zizn^4xjUrGUCpx}u#{L5{p)bcJ0y->C_MSpJ~q>26w(bu%2^MF zf|o1+P5u2qni@7?bva zAJrx^;k%Hmfh4hSvWkLbw`N!h^Q4jt;GCgB54RPFYmb!HVfeVFnO;R7Hzr z?VCdyR<)4fE#lW|?FSJ(Ax1TS6n=(QO|-iof5oYvfE_8e6gu#}@dFi7APpiOC7PBl z+q3ROzl*$g6sJzJQj4^F#1lw`NT_WS(`CtscsC;x(+2_zwbQMF1XZ>+qG?PHkaD_V zJP$cI_}eVD$^cNwB6c58yY7eHaEZ4#=p^yuewOsjU>@<1_T(J4`fLlL5?5nEz_D`8 z&j9lf$wmQzI;pn(W5yg33_RR~Iczu(8LJUvsey8iF4SNL6?K42V9x~3Uf zEEt&X{@|0x&6m?sM9DT!2#@0CF^VY!Q5{qJ>Tx4pv#ab1j>@{5&5C=8Oxd<)v>n{h zSM9P7fBjX-jgxDMqIgd|(=%KJ;%fX*Hj?aUW<%^xW%+VrJ!5I7Pd8nq&d`DOq1&!* zQd2T5X7NNTVvU2TYzcH@*UUFmJtr8X^`z?_UJa(L&1b`OOUUkdo>Xk&BaZ`>2@4M5 zQUCldPjNCn+Vo3bxCB{hD#4%?x|hY@$}VC%geoD`8?pJgH}-1SK?H*sBy<>9e$()r 
zZ83R%7lC6tdkMaYX&%XgvCEu+Tq9;F?0F z&4h1lhzZrqI%Kb4BgK`K+{*BjuG5=4Q|}$A9QE3=S@9qOQxL>MBpfM8bT=$j?8}BS zr8#Awi)9|7La~HYRo_+-KZno{P7Og`-w~2Z(M^2utY;EoS7z-`3DLBA(QWSE(hF(P z553&cgp7{M^1J=+bHeZ_i69Ay)<`z?qaiCE_QGBjS8PvL`Wrh2es17acd;lbypvn# zEqNZeRL>}N={gCB3e!ZfO+ML438Q%WvV-4PC`Eck3gI~$4f(3`nio2uNX=aXe1c+q)R+RGsKc| zwJ5y<2>D=Sl3t%%HKcgSgWg zB5KwlsBMe-P>ad+Y4HK3BQYQMJB=gwL|x(S5kL2<$wU1t1ZOC;NI}gXjjj=|qrFGS zUK?^-&EE_N1Lm6*ERNC?| z*%)mwO?OL9Sr3U0rB@g?ujr-xiuIBzBoIqd7 z-D~b$LM5ggZyx6FicZAd7gO| zi^gD+ZXhM;q_3mp?4ahM7F>FY&*0iOS}=$tHVDQ|qD6Zt^T(E5?Yg-454z>Ok94yh zakth*Es;?u2I9gD2bvRvTCX1FIZhD8a{42{?Da;qW`Z*;n+$Ksks{KT2_a@v8^NO$ z;-edNnrJ4VO4njA2t=n%J*Ddn!wy+ZEjWf;V*9B--~@JTrW4dNsezalN?#x_hcyRw zKbR@z;*}h8wY+2%5qv4!C6cArQCTu-;B5j$=(+gU^d&AP>&%RotKUSssXc3mV*w$x z59~tZeYSw7hDS5x9NxzPQ#O&|uKNp$GJGEJF&Ci*;uwd$xb$gwPD#Thwn|+PzoJ&L zB}O$}m4u?4z=kBKDlbz_KG?2Om)h3o>3dN*$_3b<_DtQ9gZf}v%&crEfE*W(BJoNz zpx$A~Y6#t!DyNex2-Bz47$r%}%JAo}V_q*RA$EC>_{b4po|p{WqhbFd6Kla)?gV0J zi8uN-`Q%!T^h=rJ)Q8-w7SeGwdPY~b1q7}u8VR{_F?96gNoJrZ02JR$jNgzEJ%U^V zJXzsor_7`Fl0lA>*kL33pRlf4VmJv4e+*Ek6Oms#QeJqOH0SON2CR}>4m|=s6FS@G z6NDD<1F6ZA(ugdECDdh!-t(E&O*Ofr@w8mpLI=VF^GbH(KO!tAbThH5 z78-kQ>g=)Q@@#efpCuMmZr|dRgLrP_*1AHsuwZu-O3nu2VW?rTWqWU>^fo_o^>XD% z;ha$IQDpZJ@>xgW&`c)e98{;-Y3ht|7VsKo)qxC9rk#)vPEpAT6+RN?G*|BWBanqY zg>R$w6%)Efhu_rN^dEeftuSuaSx~7PH0m$D7}=UW2@GDcH0jaOCIHv6c94wC@H@g% zad8lzRSTIuGzyu<^oUfm{>i536nt9RLr*Yps;HGdi*EucbH*3ieWz*_V&jaXE~?je zEvpe_69B(d9EI4Svv(Cu$qSw)RR{#6(@GgMy3hj*^ZqRWfk`EO8bI%3Lgu>SX^jKq zJ&&(i2OQ8OEkccb5ZsL zY|P?LMF&ks4I(g$q+;fJDmMtTVst}>BtY2=Y*ZB`kJ7Vg5M!4XUw%51{sG*NC1QHL zWCrqu{k`KimViHuLi!Tn1kf*{-?jm{G>bbR=-1QLD&qVp!tg*JsVQ~od$G`O05*oT znDs}*T|L$;Fo+aj3-dB87LJQXx~&Wjt)c| z^8?1NRva9C8K7(|(==;ZP*Xn&J3hYXeZ$jspRl&N9X)*5%fj_zdH}?Qb9m27QS)$& zPM%yk^cvqo3|w&A#rKlw#qO51gQ1mc{wQp^N38ooP^bap4!&X@hm0+ZEzYQW4%razh!{`nq z3Yoz|-nFzhZtzWTQ4+VSYg@gv(1~Z2XB4t(Ro;KIr2sIak#6Z#vs_L{C6YL!y*@|; zsr#EcQfI9L5Cl%~_;bDBbyne!TA z{acJn&8rC?J;UiDGjjcEUC*v8oBJ~)M$-=_i!)ZxO**NU<)JU+m(wjzfUv_vfJKGl zzCQvSr@}J2$&aXR$*$H=CdUw*eZY4Q3^i?le^x~t#;oxTmXgNl)&nGSxnwS#6Gu}8VDpAza%6LOQefAp}3xW5f$Pb zT`1(|m4Ay=Vv7!Krym7%UJ^(9ZWy^!sAA;&-JSi$X_DBZJsx{lXEyE`i$<>=Wq1|D|ZCeVe>LXoHc)0bU z*a!mI*+R~-Pt9lM>1JO6-s*}>$A*k%LL1?#%Y)v z8WRg+?OZZXi86$Pb-vl@s6M?Hq6RHDSGq|n@M~dIhha+en5{koVMvO~Q2DTR>eH!) zdA-Fv-3+GK)>a3*RmN1aNO((kGK!WDXE| z30Cl8z>>!6B_L-=6Dxq&V5Lv5q<#A40w+ zUu5}QPVdGUMb9(0ESb&d0XAwtg_cw(Jz4rft6n2KZD{1avCE%_hd}Z@LENdRoR z`xXZcugNpUNacXF5M0M06fzP@bQ^FJeeKup(GywScqA|z>bSG4*~(T7qwxvID5Kwi zChNRb`C2y$(W)?dQo{;oC3TLh2TF}DbXTIk7Qy{m?64bACK7y2x&URhw4(x(IMj33 zG&NF>4pmu>I$!iNOliB#;FvS}y6bugal5}_g)0SK>q-_P3I`TX*E^ zTZ}LE2nIRUcE-MXLz{~UKv;jrvY*^G!pq2q?mx+dVio6q7Cs`&xouPZ0a24ZV1u$H zVSh<#;m$%0GkvOa`t;Q4J3OwZun+h5CnDlrYWHeb(ZT?#`yvw2qyHK}||8xP1*G?TAIW21E>k)$yjWXqP5 z3g(|w@}tJ$5?%oKMItuNa-ij+l36;3RU5ohPx?6%sTpVrOWzCkiP@^a6SzB!CevAb zvAcXXqyV%*EH8Ty1j8lCM8Pq<7K#yi1=@9$Mt~9ZaMEzpYTfap47_d)d;kvTAbUgc zw8L0Tl5PO!AJaWpoXP#{aQgGuMld`8Y1~2CnCN}pZv@eNt%9DW-D;{3&k>A5>t$t} zLk9tzx6)b4&bdO|$yP#Og~jL?f)A%QkLi9|gzbup7;pqo643xoNJosB^V-7J%aWCH zs&E2^wdl4WE|6rhCa#`qe`LxIYES%$Z#AuD-#v92PppbNhId%)Gw|RU+836DzB@{j zxQ!5$+(`1+KiE5mh!a8q|6cXBbo^wB@47Q={eb(4-mCjxaJKtTo?TF@co<v)1EjY6M*LB+h&!)K&x{4T}LtAPQB z{^=2fP1}=}Lh;_Gb@@@TGA7JzH$c3m&N!2o!^ysFGRA8U^vXp(t#r|c&=|3~`WJYk zyUwvseBm$@4~GB)Q_^3fi4o!=kFpvAnKah&J8qLq_SR2;0|@e}ogBDwD6R-~+xP_d zd3-LnXvyudVs}daRln~}E#wICvPHurY+_}E8nHN5l{CcuU zD{WLRWPcOtl#UDM(3X1-P)T;(oUO%-9+Nb?JzKQl<4{3+uWY5&Oe4!Bjs$#|EdbYDl<8{6+jt793g!I>RxGOT1Q>8{&fB+S5XU(u;Qz-={*xd^u18@? 
zmoO&?y?&EJoOFt?xi>uq|Hae>Q1}hoS*?oTm|9bS*M3-L#z5_)hH8V}E^B1&*~lfA z<+4ejs^McfaTrhy%8Ou2`fP?>jJDtY3H&?nW3(*{aqsG!RX(^pB;1Wj8(u;_{ozyV zpQJxqu*{N&EjWK~R<&O!0DH1f2yPEXg^fTC<3S~rbRWn1sx=fV=%7XBAUZR86xl6B zSsKK+9NNUO3jT{89l{W!Vp9jWfJ9b?#z)(>3E!?`qT@D|O0{sL6LndY!xL2jT?%*m z)Cf@_biAyTEE?6?JNSmSR^F;+BC2eRlw&1elM4${+|Z1JHV&oNF?*QPB2l^~fdkyK zG7?kKq6;7l>s7Dj+PsO^KA73kN9=6~1AIb<4?0aIp1aOBV=?@XIHaz`RO8lLZ3v3| zgkIGgd(PdhJnFMdGx%2mW&r%e_XTUmQ2c<0EJtzGg68oX8GMUnmZinT@pegCN(vu< z=dEvh&}Yh46uibBsR@^X&Knf^vjDy`Ux0ITL$=@G8}<{zZ3-sgN>4e?mDGrTDc+iW z*zl>$sPY^&tR^Dae=+l+wnMrF0XIN8`7f)B0b$%>4qw-W2 zi*L~!cJ1NEPKs=t;I^Y3_2y+`i>% zHD4>Qv=AbYzn6;`n?aXFv*I{Hruz-t)(>Q~{U3oSdZ~6 z?ygr~(4oWe>)$lkwo{^qVidV@_o7~?hitPIrBrNjT6|V!k)d)OLta?<4>=x;-%&i z9zw0KBFqn&3KPA@#J~<Vv%n*=4@AN?XFJc7NgKP6b0r>>Zh??`I~-ZL%G^EZx-b#>9=SHBE9AmlHy0``7R2SifUGn()1FR%>&LmSre-F)6&ZMS)DmTCO9w#l@rfDkCC`PBKuD+_HD?(~!4n+JOi33Jzqy%#)$4qq(eHbfHWw5xtvy z@qeam0+|tA{dF$4<1|Va9y^^|&caS%EaAlu(V85Kzb?0KUu;y-@P@d+$?}!)-N~(S zfeoW2Q$W`3;KLHW4f3PFCaM)8uD?U?#Kpc7`WtZxYem3@LVmst+X^pP1aowxyR$4S-9(wAV7l~ci4;a>eiZgNEUnzPo1gvKrr^X9 z897xAHY?tFuDB{AIXN`Y<+3+fQNCME0?sZSO$J9k`UD0WQl8uON_0zS_aDpO3H>-42rdY0X z5{S?pxmWOoZ!EytKal{bI8w-n`swpH&yP`+EjyM)7sNQs^=v{&9gu?nI~65hp;hYi zSi`#M7|He5PLG^7d~oq7Drm=p6ALS6&KaG3H2&l9nc;8Ip0ZGv`$wI10Wy7|Tc-+T zly-$hl48dx>Y(>G3H79s2);LOY~D6ULMS`kooSZd(%+CK!q1K+Xqv&e@*|u6P?~mq z(`&);v|h}74dS=++hKu##=7rC=Jdums=g`8AWeSeKq_$aI83Jg87Vmz!B6AO&mYLn zE_*Qg&^$v!aXJnmTJ%5xKiQQQ|94f;Y;iWYPtZw`m}kpN!W$rbBH_&_4@~MRpO#iW z$0Qc>^86{qGyZ!te%j<(S&C`CB0kl*a}}5ws$gg`LcX+EyOPC>h*wPZ>OZ5+>pA{i zdN1o>jW7?^L!ar}R8-wxP|Fa*qjh-w7UxBYBRO538!~xN10n466N$mNl7)*hYGdlN z%-O#5jui2Y#@EAS^nTY(uhZk=MMu0l>7c5h(>D$qN(uH}#M@c-KaYb{GAy%ohMTzl znn5&@LJt0SGhH1Csr2F4aS~m^(=1rxSn6zKv3o`lJjN0fYXX62#o&&7@xM*zIb+dg zJms=K%>-Gmj`3ej2aT#|8u#gp5v&;S7NLycilvSvg$0d-axiiLB}lp^Iqc>C6DK4O zSihGfqjMnLb8*hmwo5Qhr_GBgcrMRw8*Qg5J<;J|1_c|Bf)dz2rIz0&H%D<3cj!~| zR0{o2tT=P`S?`VPZj~N$3mw0yUBdtY;Plv7<&E9BWAh6fi8&>>pDHsKX(Uoyk8yjJ z`npK|>hk%us@$aN^7u2Eqt5s=)vH@fw?swLr-b+>W#-aIv_4~9ur*gUC4OeULz$;( z8fMormCKJ@naS=Td^LZw)(DfgZ0EBSU!=4-ij`Cn`)DSk{AM`=drQ`pA7$wH9@q@G zBsUvD49?W2fU{|0x5l(jFV``jbj*Ij(sA7+EcS@q->0Xebahp&h^|{x5nfW0Zdhep z4K+1m{o~fD`;@wCSHbx*YFYiMa8n>?<1cqH8uM?^NwN5PU9ppS{u3~wQ}(IXO}m(s z>{tUyYolsq@VRL9j2XqnU|3NX7-w)w1!)NrCBvWxONXQ4O1zZc<;Ks6GX2m_%I?F&fx@ajO;W)euNQ{gj69G7RaC66&=~? 
zaupQp>D9P?=yG^+$F#EDITRy=&enRk`$0#rPB3>DcO0doxZ@XZ9YdVI3a;tu!m?m7 zkOPsP!<5Ki$#7?>%}b5Sw;pYZpFZ&nHme=tO^?#ByLAw-M7(KHgtRT)4#T_^ET zX9Yg|uALuTS)-2+st{=QtmI|I$WB6t^C~2EBE`#+`@pQpuMTh3gy}fT7tKqIfzk9tV4i1ZxY z9wXARiw#BM9~#iI!(m3bvy2jDMq$~J#0T_)6F@S{fpJ#(s^t;2LORP%2Bj_1@_j1_Rk(8i_gD@>=$IFpTQ6Wb z!hyWdpj(BbXv?$0bhlOb{y&4$kGh>|JIvk-Mm98GV4}f6kAfJj(!}GdLQC^JGyr$@ z%7NYuuDSTXAz4EkzIH3wkrOu%X#2Xxn^}YP5#!1|{(H6nubcQ+Iy+ix%XPLhy?JT> zYYt%9BEN&1Z7bcAmM2(?rQpZf>2tL{`lND>T`UrcKd32s9&7~FQzn!5b)r#gqScERd-DBuy4jYSbODn)nVRpI3rXgDGdn-@$x`Nx6CKsm!%Q>}NTNPJmE8TRdJ=95q zVK_RNEj&aCHwcyc_9Cq9*{lJ)vb=i|s1(CjRn3JT`ey~rgz{;M480B4!H8Izo+T#=4@vEZ1io8b0sLatL-P%IvdsTt^-DLF< z{Cs~ABH1Yld`7XhFgn?8PfoRM-FdT)^1C4;>pz#2*((qiIX7# ziK;pp@#kgWNZFWRLA`_G+7f}XQ+uMoCFz7Z1@h;j4}&A3b-~|UB2~y(S(jU z9Gdi)t>fzczZ|9I{os9`b-{WQ7UqQ3-wD@Y_u6~yEFITFuKsNC5dlp7)z8+UybC?` zM=>2y2LGP2`8NnYB2>xEJb{k+WWw|!wvJA$7a)^P!BERqsN&|MCzy_TKt=#2RjyWB zv)<>;Y}J(GwUK4h>LqkZ7>K7cCr3qWdRp|<)&K(r?{xsvq3ExDGvi_=Tc<{~wl^Pa zc}I0$FBFW4UpxBxWkCL{gM&*$OY&yr_d_Hz;(tsXb6dU3z|irFkb|IlOXa%OHY(=c zlO&N2b)I6fZiIaj;_?C69U#Kf%0QnLb6BocpgBw}2JvYK_RG&e8O7yMXA(}vK+DeM z(Y!8}$0C3Q=)^z1TcE95Tc<@WUr-dg+$_BKA%l4mOJsEt6<*dZXz^Da`r-7wlV?wZ zOImIjYVyZl-_tyixP5D#3C+^{ra_1Fx`!fO=k@%ERC{g4Px)|NJ;)i&!OmHo8=C98=WUo)hrWg99VUPXvMa42*C$2jc12c^^aP+ zv|oe?_tRFeU}Vi&NU0iEL_TqItEZGvksN>5_)va(^DsF!2g=b4;t~Je@kBdl)P z>=N&?=GMi_qBr=F(@?wscV$gj`zT5MT9JZne#K~(@x3YP+_L!Frg!5)Tmg%wRTtSu zQFDjN1F^?6RbyrrF!ij;>h^#Q8*3HS-$~|YmoYxV2y$Hgy>~k)?jNJ=+dMjt9oVJ6 z2OL)*Kv({u5}($c7L!8S?DO5Nn~H(gK0!Bj>vqV}xngUi4$WD6I!*dOhMRCjeuNu> zAicFay9XvnOdq>j=d9Jo?;zF7=7C4Wpr-?;s>Kv3yf-7gpy;FfcZB@d=Pwz%vQl(c zPFv!37vyP@Oef!+W)|xd9o{6T;*33FSzgk2qpMp?5su5LO+vPI(j+&fR8XGz%>u59 zCEHJ5!GaJ^rnhJsy91ru2hE6M<2vlZl?#{-$5L=;5X@&xc&ni z20c5B86FKx8DW}YV6!M78=n{L-}p&0g6x=rkk zW5Bi)DtJL($AV}u_>vc|U|>{gqC*!ezOQ>JmUe%Pa{4zja>6#!P3v)iSR8;a)Mwz^ zKq@~ljpZkFH8FqZPTirfxo={^L*DvalrbmW$QKQ}xTAYZsYs^P zH~Pxw3TMWoP$|^wzzivrkeDJ-dDB4zwEh|!9_}$&f6{t9ae~qYS7zHDJ=UW?ou68s zvGD&xt}(eQqUE)A&iqp7_un;g1>h1vm2fbk%)v$u!$-9Cb8fq({Xl@=`<;A6Eo)cSA%>r69uf|49?+r7>tYH-b*0^aKttlOJ2BoUN|*h|&2=O>~B? 
z+fZfWQUmXOwjl2X;iQwEpvO1r*rdTwa39796Ix!=U)LZ{r>5ED z?;z~%MO=eH`{3F9>+_f+J2w;_LKl_twI2-V29|;8pn61|z;rXB)mpXAvBwr~{?m>w zUQnoE+BZIQxV(Cyj)N0)FA){4-N5uid_#f(=c`VS(WCE;mGbbf57+XxXqDBaTY-Yv zU@X(K#mE+m(ZC^Fd{kN|UB~VcQ2hZxj)2Np*h))#cBDh1LzkD zAY%)LufS|wi_-wVC zq%5<$+FxxI>Co+g3c#1n03V8<6+Z(xL@ZP_`4^}Mae)q9?yb7V(4p6!1ijl)9nVbz zrWaqP<){0JK@zI-hp;P9$Uh#83aHH(`zIDG7NbeFxHCfDA3F?&1}^`TFD)vT z=Y8*~@rg{njUqC;omiyGKP7e>VDuZ^u+x@mOn& z7>z|?=6VdgLiLMEb@WFN?qep#qep1L!}FgjjY+7GlRb68@9H1QWraXjaeZG8C>w1tAVs zMe@3QSw+5qemXOMoNBxV^V0hVd>b6<**sE(u6ZLH_Y{0PT{^7msPzkO3XAD)OSz{7 zJjM!_DFJv2G0ymRd@Rrd7Q7avxRZ^!x$G3o;Evrw1A}0IC~690VYTO^G14nY-{RI9 zuoQH0(rB^p{5FYtWAm3^Ko(RxLWs8=S^hWwF8X&Kc}$H90%Spc;^gKimMAqNZ&aH# znv^^a_!&*PahZ;X(TVTDP(nfoMwS58XsXD%CM!6h(&B}BR-O8Bgy8GvpIw&j;7c%A zEE!##DditJKlZ+rGn-0!o`)gQIbNfY4B~ni!ewoOpfzNEC6W@j@QH3O=2T_mmroXJ zt+D@Hmrs{^g zM?Yl0hUFw?I99HO;_b%353G(Su{J|lZXB+_A*{MV1WP5bNDNEo{d`_2*s6v)V6jpx zQHn)Ln8hv|0dFRd+2Pgq{&JJSS_In1yhc~dpKgxwt*#=es@0yD&FAIM~0I0 z)*I}d2F3Pu=4I#b_+salw2Lj}q(*x&A@E$A+PfyIZ7{kZU-`Y1u3Ix^vDiw}FH9PM zV22Z%7>=E0(j$GomX_AmwicxU!ERu%P}AJp;?Nn=P&d*UBcN=nBWUaMMbeq4F`8vT ziy~eq7Bp!QuRZL07dlE{E(`yR{8>gqIf?Ev3*a=**eH#!7q{ zW)CK@&-QZ9SnH|oKh%!;Y@f})FC-oFeAC~X|3QL>Qw@3TP{tbw`TfdgDW)p@d#rxA z@+jhaRV~mJAskR z!iq5=NNEb=EU41{7_P{CUusgxR6+my3o_P7Dzn`!D{A60Lg%MPrSHAgj&;i+p_)-R z^GcmK%uoN-?*~8y{VNt7M1-!4XyVr~VG!KXg387Fu(@56+<8hRWb1?-&hhb8rrfrlYf{X*enk|7V5uCkup$qE#?K&{Im{!YX)to*Cg|HH^2%C5*;A{?9hjY(I58ggy=YtC zWpG(_mx2a~*a)kRH~GtKiC4cY7Mj*O$__z|pW&?GqsFiHKz3-0Id=siC2tk*hfVo|2J+J%5cghjX?~lXjB1lHxS= z!u*tu6)v=9gf$hC@%A!nabuRf$c(o!ByuU&*W6mb;1n!sIO~Q?DcJ>;MP(Cq#MqOx zM=ou3+R5B&+<3j|_PFs;CUoq_`p4wQuknHq4{mK?r5u9B`Nf3K`ObPjG(HP%?0W+x zf2*r@gojK}LIuJ4JxDEg?=3{QXePYAXaFlk>lL zMlD|pz|V)MmWs{nH_=7VF@e-LJqf}$wr5ZPN>Zi zv0JUn@WBt$ZL2Gg*RL%dj-jc4y$0ANxHX#;e^f*}47*v46Zu7(UA9RaUw-@izZ9m* z)Vunkd3CZpZ+Y;|;1;dwFO~LY$ynJJJtPA2>NG@sR)Z}i+1P1d`*B*B4tvr*1v6LN z910o!1QNNPh&x4{2vt=lq1SeT>jT@-LG83>;A}Ih`x{0Vqfi3$Iy@~*O{xF*=*RU_ zC|Fzh|C3r%vPqi{y$?aqwG4p(P8<^-T6T2k=(14!m_%40*d1V5jh~)C>Pg2~1dnUAFn+vN{ajMI^3-Ixtm4~v4<4uI0RJ%|f8BNyDtQ-c9J&e1d zBs`Z+k@OQK{=50{9|O2NXg~JoQ8#M)nY@}@e%HsG>gxMZq57dOpfq~7T-EpM2_d&5 z*U6-t5LU{JWY??DoGiP?xVx5w3lZE z82J>US5zd>wlmk9)Yc^=n3U3qX#Jk6aNK_rX0H&RPvjWb-jLVviciDPC-Buhs1M?W z_(1~J(&(9EXC^Bz`4f<#*&{czn_sU~$fpXui^o0*Vzed$PPbvUYV_*y3i>in!*K;G+Un@#@H0dG+Kz zIk))~`erf-eM!&e@A3&LC5?9fn@B~l^R8|R6z^Y0L;g5$6aEy)2=t!>_4GSNb^l|3 zo+LwWJd2XORPFDo|Ff*J2j|#-v{oQdEYB7W9Uj;qBIidl_ zhhjf%PFrr}*%=7EhBz-=l9)`1HthX{#@WL1L^@yIdL_h%G8-Xp-bmb&gs&?~ia6Dh){m-7Ra(ob z!%3s6Mf>Ysu>UXgcTeS?cUhN{WW{2-6g~JZVVbm-#u$G-_aRz8b)pcv!E-taR(`#k z%?$0@^#-_bHLRq;*hwb!?7)6-mBqLT%8krF0yCH_!C_$tQP?qP2@B$|nBoe!s_Ges z^~ZUHDkSrun?8#zC0VTNPn>~^xV`Lf&b_!|u7H<%O7H$zD~*wB@C~{t9EVPvVIVv0 zTw`FYa(?9Oyz7yi2^@AdJ#xBYI;@JqzX9eyi>7o33%sUay7$-5*^!U{>*Bx=6SZnk z&e)~33Ee9!&WwY(l5q3JH2XAEn6pG`WxClMH_JDrjPKMp?Bq7EC65$b!@pK(bgQ4W zuSUqa9_6m$_hpV64#r`N=J)=}3b6?r#;9fS{Lsajd$@ZyUTa2p0|dDYdn|UpD9hZDWO%!snv6 z))G(#?t^*)RPJR4s1L6)h4I z9#y9=2WwG1xM9jkn}#6@8kfKqv0#L74&|6()-@p-N!R{1>1P#!&Qu8~DCAQDp80k4 zl}I{{BD4m2J!4!t2+qT+5JDUO^gGDVxo-*$qtj?68kTthR=&J^i38=v2mIhwsfK}! z>Kgg<$cvb@p!hh8tIwFqj5Ni_-v_Mu%9p>1vKQKW=n2z2<%6oP97*dQ2*{L#r#6O* zg>2mhqgYtjUYvrkw~If!8lHqsK{2jALp5RQ{N)>*$hGk}Qu6f^F&=T0X0^mUq986? 
zMdHMl6j?VxHBBuT{b5q^Ht6mDe;-fdMP#i684xOY_P46JAaZI5VGB8pQjwI%Y3y`| zeH+E4++mHKL=GH=#27nKAsY!rOlmDs{S9QBSQL$pkgyG|!+q3*DI7nm=!y=ai(ou| zOqZ9$>tGv9B6OO7h4yzxT5H=LjFXLf(3a@R*NDLXn?~jzcXG6M=}Z`b*aA+YMBO8_ zH?=xM{dm7a)YK}pHyWjloIdYWK7CB#Kj5>_{Nut)j_JblVG$kDUGZ}`{s~ij)XXtq z0#(61ygqq>=6AsQIkuQ%g1x!DFmk%V6Q_C-He2VibRhdtw*kg?bMuuZ6^$vi$Kx2= zol9u{qUu|0)Z0h(8QnnSiK0r+9XWdTb6J_S- zt58gWr0;cAClxG4O$cMFxui`dF|*MC8v0BP4H*J3b_SzCf}x>*|6RBUYSiF{B9=3b z1!}%Td!4nW5n8zT-+zV{QV@c@gQ3dTLJ-5t3JQvg9T1Q+NzKOO^LBGk%MAnh(=tBp9{qf?)Vtd*VGQaO_c`Q=x zSw2h(WNE;xZ4BDeqylnycPEDaYDxo{--Z}i%IX1s#&QVG(D%`Cq1vC+-%_aJK9f8H z=C_PcL$v0(&L5id^3}C|wGihN=Vz^$Tevy}9Q}$!qWsg z$NAE*XhSoDw__-nG3*O+U=!m59U9)y(OYq*r!DJmgfqZ8?$d^K8kIATh6&j9sky^T zTr0m^9%KcVH%T}4CstP2xHuEZQ#m#38vagI+yipfppFP*pvAIg*?+2D{=nBqL5j*~ zL$HIuU^o?c`Ck-n=5kVYmB#gNmDNK+gu?YOW|h_VZ!L}6mBQgR!{~qC$|;~XF5>X4 zix&DLY?NSa;X>d6mJ05OKC{lHv4xC!(p|WDr}LlpX*dlJJ14OswTL6YXz=IV%EdR+ zU;GLzJI+~T1o~6@w>o5&#rJItYqH|jFBGARulJX`mw{6TU{E(Vyoy%m0QVwmgq0Gk z^)FmJ9>o3aE9Md$h9%6JY=d6Eg4Cu@!|Zu9mZ&z6lImDB*9E8Sz;~p;LwT7?Q&R%9 zA{H%A^fA7AU9kdRQE)+CLi~V5b#c|ILU}L->7}AblwGn~2^8$+Z2`*V@ zML)NufK>@#)z^Qa);f|)ynl7v+{fW#>+rg<;Tx|lIngdds|78cZVP`OwTNU3E->r}9THk&f%Ha_t4cVu13*2gW_eKc9p@I6T zR&ebvYA(qd^=(d0!dwPN=`Z5d54B_n1E%-N1AcFPiYsbwO}!*cQ7UToIvklcj#?}? z+eEk{jw&*D7pV4!NBVx3cv)Nht>9pp_vr;_Ov$dzno!(*zbi_93>sCq ztJsJ(#U`K1C_nEvFN-LWx|d0;@xM$%mLDaJg`M2K4k4F;%>&f1y9#28ur>Z{5_zhJH?# zG(6?9uC{>jV5OIAt0kPJT=>j0$+I&sx0G#Fal6T?b+a27was-;x$LX0H?K6j=q;3_D7E*o(@ zlRR?)%e_RNp~n#utOKr?M018PP6f4URs1w--{7ypeS#n8S1+)Ps-y5d3*sMGbp=@nIWz&i|DvF8|>JAQebr|Z`tIZOv`2k zPQM9scN7E{mihx769S^q5Jv97Ug*}okKT9SUb>2i@L1E7~dm~GHd)7$W= z&2HiEGM7Dj)0UU>}uMf2&lKtY5YIYH<~xJOb8H+^5dpxv;R!GE{`qnb$Ei z8Mq1uH(7JJ$xOh$3VsDy3NZI!KF+G3u2U5pECdW-+JwiK808$Mv)u4Bg)ljP6K4!mw zpR9R|AL7izJH*=r)nRjUcvfb@*qafpp7(Dg`)Bi4i~rXDLX?a48)Hs`i{p7p($tw; zV0#dbg_l0evscep8lG;Uy>$-ix=F5BJgF79hnT)x)3VDYR+z{T4)7v+{mOC=z z8RyT-1a$77@FLSP{YiVnl=(ln5~Du9I;EB}w(`{B2EnXT7A`$#A>hNbcriZR_rak5 z>4WgA5UY#veYgV8K2efumD=Fsz|4T{@$r9p>j&^7Qt{pScrq6!@dFq_Qxna2xo5Q8 zBg)G5XhCVQy@I}57N;;h$0b~U6rMA&1Nh0_`uX@>vGm9gF{$preu6({pEiHp<$^e{ zoF<`(`}@>a=T3&_n!$aC-ea%r4Is>e_@BPL|JzPz=p=!LQp!Q1k;6LP9gk+eV1MU0 zL~^}7idxY{3@mCeVi5fC`�Eo53fd-;B(R!B1iIIdcW8p~aM%r;bv`+4KtJV;&Y# z0SPPvW_k-m&oGsML|2aBiewEPO{VbG13B|^8Ze5&LXa(Lw)-xC00aPpzpf4P*{R;% zAN=w-AcC9p3~>J{^|LXM%bvsFI4%+39{$|b8B_I-kr=~j(P~4C9r)0n#KGqA)8z{} zq>xeY%v<@N=qhob**`fWa%>CO#>Gyt*t?l;(Mq_6dSepq_uvA_Y9-dnC#NgMb@D|d zt!O1VeSEO_XR#M`0G9vUn?^l~F-kTpmuNHC17J|=r^b!t6f(kOjLmtqV|bU7^$Wn3 zo5QZ#RNKg0JBzF$+tN&xZPKxE9pBOoS__Qv)@_O;smM)USWkDHZ9eCoLgi}Tp{bLy z5yLadGXp4U(V!lJAlR#GwNRINZCA7dXI{Do9x3nalkr^cPkqB?{<%F+M0t5wD4Avp zY=0wqlS_d*E-#%5MZxGX8OQRUNuH&=N=}F(1-2nTGH>x;l~hWUkAUn7*+@ZsZ(MJE z!6)$(nO>!Eud%-?Z7kKu8@H9SB?5%CHqh2Yr*5Ul?|}Sc8Fz5bdnJp!6FFWsK2@+6 z0I>R-=DPmHjdeB6b43yCmKiHYhyQB~c+{S#+WD+9G#%x2YvgO{2SPp~L zwsc87=PrccxW$4KShWsLXJ9&pKzCClCc4{5?KH_R?!U;x8!O5FAyy-ntH*LNR{QXh zCQhv^thR|W3^W2i7I{<0hBpRraPC}9ZEcNmtzn?1hS0R8Oz`+mIjd_NTqM^#!0rN? 
zm*Wc^#@Vy7t;f|hYnI!s;!)R8gX<@h>vI!nqpLbQKf0w{`yPAR{=L%-x{*7sGDvsE z!HN0>X3x1rU@yupXw4otJE7dgeJ^WgwiHow$lNkV&R3MYas_mlhAdF34ycU2aiH3@ zC01|YY#o>S;Zxbu4}seqOyZ1X7hAj6Zvjs?jM*Z-=_=6(?nO#g`;F*LTw}Y_G{t`a z3U-_k>LCc)=+*ne9pIO5=QJ4Z-=|_?sI`EhVUF#~FEtj6;54p(cgEFK)znnc`GBDh z&mF7ft`v?q57B75Ga@cRXCvtllS6-Lu+Ql>lqFOiL08uSw@dtBcZ0gsC{poG52HMR z0uYA@fn?mc3@*I_mt4jNW^&*FzN7kT_c?HS+?~l73pJ zR}CJ3IWuqm#D2G_Wz-vJ8HATy215~uPDi|M`-n>cf2T~NpBJ1zT|LvgKOl#d)&HiEco+9R^Yl| z?^sZ_bsfg#p@-kehqr*dDcDVvxiQY>G0&~vN!L%Mb!WGZ%C6bSL~mluBlFI1xbw~& z0p=!b1Cz6PRN>un)}8WEg=e=CBppg$)X)@K93E@6Ntc8-g&G#6L*V6%ws43&p(jUU zOU(0Wm~4X0Q36GICf*qVmd0@85VL0vjpi%v{;gi1Vsg7nGsx};@bYiKg+abn5-+2( zF&fP8tIF!;GF5`ogoLtLN_tZa=!6;5C2{*-jI*k;>oEj|U=|I6X)rTili=03ojt&G zPQV@c`VE_=iEdp_3aLiJ2cZ)|ALMO-avLBZ{m$DnxG|}jU|_~ISGq&tw6kCOd?Yd+ zGr#+Kgo~aCoeU|BJfqDs+@LfDU~@$Z%J*47)nwp!kFR&;^Lt!i7j zu5az0+b`CVeX&VHJrTQ32&UO%(+-R4X05BxxFZTgzw9L1=lW`R{S>%&qs7|mOm=DO z#59@_%M<0<=*-;)yJ0trZWZO_VMdyKzRk|Uh1{@mc#Jxi;|PuO+5&lo*`s?|>^+9r zfxJ>*S%M99(82v1X~E1sGAgFP@~xhen&-7FL1CELF>Y$F$7L$ZtZyiyvG-+`nLMuE zaZ{NcFpL)H$6R?NZ6*2wzUy)zEx3~AVR9Wi8=Q}r^x;bAk{~9%SQSGV!hHqN6 zy!%tNVBD?MD{#F0qc+IOGP@I#%%5oa#gT+Nqv?T2Y#;~|4!o7Cz%gWIN@&L=s|`=ihHQav zCo@!G_WJ%yBONMwbIaXmte}2Qt)TfPABSz?!g>gara>Z5E_F`}u7`WXnJHNFNBN72 z=L`eMERTNwK5NR1j%rXK5J@nKrw@MIYn8JI!|F7RKc`zix)Qb3lDXOy0a|*VKd^j} zfGsqPa3r#$Q_n)v|9y<cj#Cd1`{w43n1*n)nrRNC9!F3z15D5pmtj30uf zGI%InC=rr8vKTKe!iytxRVtesg<_HLMIwaTYNVw=z_sw?HVEYkwL;$F4+K3N6k`TC zco5iw0Otgm;CP`}!0wIws&Y#|iG8RYd=rYb)I>GkU&sr$jsHsYZ%gS@y)|jPmdIYV zKwLz5zd(F%`2``>FrP(_K;{{Y42r;RGDJRPWwmVjo3p*8QJLcV zb|3GLcP9M!Um8xNG7Tdebpe$CAxtclUH4DPQ6b8VSLbE;%nO8ux^l?^-lUM%#hqfZ zG==y5w>6n+1R}T8PWoYH;UAldfTPEhI;tH|B~)SR#AuL|MJ8Tvj@NnZc$$Ju|7|Qr zjf@G#Qe4-_SiD(AW2QG)PnlX7E#Su`=I|_J8IJ*o!AhXpaUu#+yowDs=ZEXf1meM) z<32cU<}r`6QI?cfEV;pevye1mjAP6|b@f||Rnc!)24gc@H>hi9x*g_ilF4UnHzzw? 
zIA|b9S)q{R{$EvJnZylC8C$F_=V*9vc|HePH*BR$Q@_O--*+J$4)Q4gOjTu^xR}9M zLda8?cFkG%=hNFd0iQTKXmC7mbssWuAutF+Y8)|U3QBJ9;hLiN0%T&`=F-d{jlHs6 zUH|p<>L{dK5|{NXCZo3H$#~%Se-Y@~54RrK{@>x&{8ZPkPtt7E4MLcN4560y3ZP3G z5;$5cVxAw=H6hyKhEw%GN1hFlEmliOk03R=|IxwTKyHe=J*}iOrbPihGUm4FkSp0H z2Bmy-6VW_&m0AasKi7hu3r`VZrG+9r0uPtJC7)?K>WXRMo|&2cxarHk`kVgy^HvGB z0KmY2gv@1eOvTnwEqVJNsyXnm0lMH%jI0!THeCL5O6L^hm1=BKgU8Y^EaK{od8$3N z0JWAzrB>%-%YZnI0b1;3qa4>gyewNh@sLAi4U1wJ;8s3kDNmsRlEg~j!pbKcPM?zUmcExMDfl9u@6u_E##`GDW$Z?$_ngzW_Q|94VjNjck zi@@hKNA3bRdPC55pjEu)!oCddBR-YBxQ$MY^L>hL5J#7Bj~O5jq;i@d&IOR4IEjKi z&r&gNl7FkuvBrYj2lO#Z9$r?Krc5CR{++_%=zCA5Zo}x3BV}3>_4zJ7C=u39UE9JU za`H@AWNBvY>v<|8IZ)O;l6zDKX#xN~A&$f;m|fouf*xW}3sR|OvNd3de>n$3W8B1V zbnaLW%d^O~_*H^O)G?FwYo~gORjfp9uf-hTyk*(SGM_;{D+Ahqsj7GbwgAfqHZm)+ zGSJ^QO*pH6KstSq4O+dcm@Q`5Yf~@6BE^jC0-5~jWVYd@Hk#t_BjE1i7h8ygzkYG#*b2sRNT`_Lal`|9BK?zJ>OMBcWn37X5URa6Ek7sqkYBPX42VKK@I^<(MigOk9v25E;uY+M?VdLQ9;lmL~6agU-F$pP|FySJ|MN&{w zQH!DxErwRCIPnrB(n*?Z|2vILlBF=SNR=jChEXzQ$!29^=j7t%kt3H^9-n*#3i$;T zDHaq`qEwl36)II3ty+y*b%B{@z9n{=V}(HvSmtMjEwI%Gw)vF9jwH)xJeFo`!k2u- zeLm-*3^Q{JODk&|TRRexQVJ9*Qmlldj(Oh+?>VmD1rFp^Wri7UgmNQQs8preC^beK zqt;mCjE^3oV&W2#QqnTAa`Fm_N_h|!RWLXlV^mB|%Km0F{vt)r`_Z(wL-Y+`C=ZeeL*^f8f$Yn6r^hj=Rv#Vm8fi_XD2@kED$rI-AjHJIiEVZ%#jjrfQvnu zrjtA^1L9IA3zPK{nV9P>keOI!?U8kA=Th|S8CKbbLPN7n<#u7Q8GA{4o4U61Ajh-O zSFU-^`hD6dL0V6!I(d-l5|L&ABbdTu*6KSDt)=T$X67XpiDi4;ZK}r8gv|)1Ba^uR z`0m+Fbb%w8(Kw-}Cqjo=c&c!@xI5-HRGRdukOnqx7e*sD3A>&dDpTwxNaIfH@ZRcj z)4MzB8V6z6Y&K|~kp{f!+N@Ir7jsuyT&a)-F76iY6flDYQXvg&%u!)8xxuFE^bIb( zQ4jJy09T93jzG|o^1~1q+G8C@0KxBnlb~lpVGXmK_Qj9qqse7}!yWiSn=`F^4s$us#6Mcu_;pho0{r bkH82T%!~T~dOL3iZSfI!+IWoKhyte*`46Vs diff --git a/fonts/KaTeX_Math-BoldItalic.woff2 b/fonts/KaTeX_Math-BoldItalic.woff2 deleted file mode 100644 index 29657023adc09956249f6295746c8ce4469b50d3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 16400 zcmV+rK<~eIPew8T0RR9106-7`4gdfE0D43K06(+<0RR9100000000000000000000 z00006U;u$k2x2I17PZ00bZfi3|sWeGGvz8}4HTsSn}h_&&m_g4$s+7>x}(e0b|zhiFmih3+Y z_JHa$ux;c|a`FyO&iVK5;5haj2M0Us5CRDY31pB2cF2N2#x@CA?hH+IC$1T5oL#Oi zTm8l{f35!3j;S46ZTBB`{Z8%g_kYV-Npt*qbNf{)Q`HU6L<5cyhmJv2>TM?E4I!B; zTrs$#{npsIL(Zb)U1m1L)1HRv;hxQZYYB2DMJ06qtE=2;?F$=%RNirU)ujURtb7>?5s{7KnM;^!<)4(Fm5+BJ{pbl7Y+ig#pY8WtNs@R;Tv}Vb2zWA1jQzm*#o`3DI zY!Lj&klRuUKmwu&j_kk{H`eCp-_vbX8mEgy4^o9{Y;D`8bQQe5ncy0wN9SLFsawuhEI@!jD6#EQ9wi)K3zoxV;?`!a^soM1A0#+O2q9KiRK~gx40mr#h`^il zZVIM5HcJKuSG5?>jK5AE+syVBx&R<)q*ZxDgS_aq3?!Y?rd;$kJ} zznvM-#jF)AbEqu~X<-Qmg2H62$`k9B)$6!d`Tf6NKjrJ0h5H=%>B@|McT8H*%y`vb z(%rk>@RCE*6N6rnrAbrV{r!LUjAD%&0?-v-O(btVFYk|g>A9-v%&i0jKer!j?XGS& z<+bDDY`-WK+F91kb{WD=t*O*|E9?6lh zmQXXHp!8Y@NHBUw0BY;l5r$Z?jtJ14BAd4+e3v8LqOKEP;%_?Ao?k!C_D5stN`Qb#dilpsL~Lt7xl?!e&&4S2=E zR{2+JNlWaH`b0~xsXo`8-vLLO+@wrgzj0rcEa>Pjcu^EFD>bx0qSJ`!4s=W)SB7DX zyeW+chsOzOWsuXMyNTP2sw-^>J9!)tN(MreuiV;}0bNt7IZIc#j3n1`#guNA&>Xg4INZAV3h}GAY<`bg8ox+~@ zEJqxB2|yuNW{M9&^Gdu^OA0)0gm8&_rxmUklFo)kf)TlsEy77;Lqu5J3xzT2=~ME@ za^gr%^4no`_dRXwz8N{T7zGk__bri%5HwFB)V2_IUxXhTJ|DrhfQVmM{8&nc`@9M2 zXW;`Y>&Y(L$PqX$=~u5($l+$x_;fizt0k1976`f_JpvLcZ9z((ubTuD1zh$5Mds0` zz&!azxO&7(+3ute6E`Nj_ec=&1{1U*o(*p996V7+3I&pM9Hm+ZM5e<;t|LUDGl)`W z5|xe;way|M9U~b!dwMn`4h@Oc)x9972tJC&*aY9UL5<3vTYEF-@6WWD;D@_Uf_DBs z#D|;c(4t2%pcqk1C}xyQ6bnigiWNO`fSS0@6sQ>5_QONDm4#ZS})jb=GnDsb<++9`MP5Y5ABCCsK6# z1OcgPe*jTu&{d@OP3B5o+H(0WaRW7mZg_-({3&wupt@5#7L1uiz|q?Lg($F4&rmf* z$WS!B%X!n#M3@kc4ExS+zAZ>;>*i}1Sp-59rFWX)PddDH;Yom8c8+t70d+3;Or*FI z)w110%}-KbC%4S+y9uWZomHd4JiD)+af=)x8zy=(h%+`qq 
zmZ*8+O%8%Zp*R`>iz92bPPMa`3&xBf%8CvUVcQ{1?HmCwk#{g3!1tVj8kNlHDUYCV zCf$!chN=Cl7$!5g27GqiTIP&Rn~YVsBsv``>Y&5RP2xNK$2M`Sg*GAhZ2!et{QvfwD0HP(pY?U`~n}OR6++i?h=qBvW(Wt8fh;DSXe-#52$2u#kmt|A1y7QWd-)-cPnK@ma;NS8P8HC zHlbAow7S5)rnEBFW*O_UjP+T@hD~T=0c|R9ZnmA|9&rCZfhtcjU?kjR&?$Az&4KI_ zSnmnMm{_!G_h+6R0wTPR5NfxX2gN>xR`3V}X}clF^apfh;T(gnCpvu?0v`_i$`RIJ z+Ei@jh**$?k( zrfOsK`lg4QEV~?;Acltu=zD_V2Gcbn0mUMMVXrW?ONwM8CNz}N%W`~)Fa2-mI?xqH z!=N}Tl>ha&5U`|`{o@E=_R_WwIpVYF@9)~n#%%{z+rHfnV>?n^r`pF48%*NN(_bN4xTXWen6;A%fKBKh1AkiwDiLZ5&f>9 zg6qVZ=o(X4(&5o8S8@M0zKaYHDqr?5a=E zEK_S6$4%#%s)VqJaa^@Wc2!dE(cH(>hnNPUfp4xOAMInBCg>BMxNJH>Vx6eEDN|;k zWsNxONPv6#KWMji)PKrkuxR;KDvp3|cq2+8OEhoN0yNqtEY33b$_ElD0u4qA8=%$w zrdX|JEL;}($`?0GP9_Y4R3IJ4_z#+i!Z&M|Cbq^qQ3x!+n}|Nqk6ZrHqX!R`N~Ii4 zD7-q8SgYl}cH)xD{2a1TONpR}Mqp5s^kiCvHD!ZaZO|>_#-ti&1=q5@&qQ&tkVxIl?8Z4h|EbuVLwU+pt@owAm0E^WOR5=hOs!SOS zzo8!zCdTiapnT20---od64lN*=@I5;d^zd~UOhY51+b^!Y4-`-{PgMza2~SCY|?}- ziWU^4tah0Mo|cbXAB;O~U~nrmvYx_@S~m}jRc*f5oo*DLdJ%FCmh2w{u|@%=#s4LH zuz-N8_2!GfNKk?7&sfh5&W6yEVtNgvS5W`T_^ekX-cR+KtghXko+AH|f3eI(a$I4V z-$?PV=3h6i(*|nqd5=Qs328S_{l>p?b(wGOGEKf9drHgyIC99<0tT*Dd=xMLMALs4 zz~ZI|RQt*5Dw(pa6)s1w*#dg<{{j$IV{8_*zaddF@mwSDtR$a5!siYB!5jaZ_!2+l z!GLS2*Rhz}ED=hmEUj$0f%`%wFW>3wl5ON@gn+Z$C|{wi;Xe1gFebxk3{!{ICZ}x5 zy6}uB%p!i68ptk%+5c|NWSubWzH?q!Ur;VE7Fz9b zU#Y}Tf{TQ~*=rojW{X*8c9z>Wh+uZP19(*Xk5I%S6VFfB$SXr5>|bN|he758U|MC1>v`4Kuj+J5F5e=O??MH`ZIJS3 zP`wEK?CCHbTC%q?E3Z+e+Inww88OH?d&7t^n{C?>;U0gb9bD`y<0~?sC`A51fIQuP zjpSp0f+q@#eWQEcr8pkTz-BwjdC@XgWwYRfN`t%1My+#D6v3pjAbl2=FUT3K^~_X; z-)IRK3&;npTt)lwr~Mkw83e=JpAF&P2&i(%_q{I-9wNP6x^Hm_T*K$A`&v`qr72NI zAT#W*r374hzJySJ=EeRmYcp?SLp8c=C1gpvw7P9iNfD!OvU_WbrzG-%o9(0`_u3WDGFa2TGgpJ(A z$gPglL(;}e=q)r5p z$C&ZESV}y}fXdDSBS$Tq#l4Uo6w|`O-S#&P!yA!Xtd`P$;ZwWnf_d zCPTWG$P9uqtUnC)sO^n~XLTIotH33S6oNm{sx1*t3HGAV|Adx}?W8^PrScYg!g`C5NLJZnUXz zjkx(TKcyL9VuAB0#5mUJ^cW=&%2B)4JHSt#7w<4FGE9XwW1e2l#4$Qi<-9n(Ndbq> zDA1>pu#v21wC_<6Z)9dssviDd!Plha?NOTdBUG$;%&LOS#8SJ8^C5^_&O zZFfZ+rPymKad?K45-M|L>?8*G%?14%aRexz3Xef%&~Qze=aUy2x26^Fd7#`-@81lw ztb&CD#SN~Qv*+|TZDJlv{mdJ1`Z8c`e61a894SihH5^)_htbfyD|5`boYb!7d5Pq! zR8ms_BZ(+_IO|0h8hXSu!De&hVR?+DHYGrL-`$e!iLPP+yzCnT*EQIw``4Im5yIfo zHwm_9N(T2vHL!fcYXwbK=0g{+KuaGHa7D=Rt&>ouMP|TMt+SDjx^u%D;Rd=Jm#hm} z9Wkw}<4w!_DTn$Ikm2^1=n3pLLy%fyWk&mC4Rsr*wedJ*a*eqnZF!5cT+QXIuB=Aq z^nqRh;hf5^;-J|F4iAO;Fz5p{&X1ejZHCObnYNyp;x0tFGFc@P^-pPuARS#X41}la z#yzkwF1#5ge%dZk75)UGbA#BubbLSl=PDr;*tRIjd+`RioSg)Up-}G5_9TUx0;g>? 
zpMi;hvTL*62<32`S2^s&Qw-DoXfIQy)EdRo`Iwk1LI3r5*!&BPoM5l4OJgL{u+ItB zmksAdF5DI_yKMF0T%norSxNWfvVj`HgSuuLfgVuB4agXWSf%fQyA6PS&@ zYy`e31PHvlZF#G$W!A(?)`>qRFO}PE5OZcDIhQn!FDOp-a}^hXqpRj!&J>a5XlN2n z(!Mk8&{Vd!&@$hm3d65bph~~cv4oQ~Z^RwlU9C|7dr!n&I)@79of-(sss6QKrCv7O zxpQ@TB0lgeu1>bhD%x zCRsyN+PlK=A{E&666s=KU8n)e%ysM2HF5cvJ5=lCVZcd75wD7?DyNU~k{!xe3_ z_tnCtqhWQMmiMS2C^sy-OJ@Y}P?5BBJpuX_e0w4t*tTVZICA{oTg8MjI|2ReT<@7s zbe^vKsJiSluHja24Zox_G_e!Vd(NBFrsc6($Tp8sF4GPB*I3 z-Eu@eJc4}B>#{hqAS=mMGK@-w6FQUx@f3%SpLFYMwfyk@qxEV$psgl>mhTC$snT%g z!aD2L8J~qt^f)l5W}My7{l548+*C1aZlp`^Cor15-g5Prw%n0OS&R;yno~ow0gNay z?SR5dGgdLRJzO>oTJtu&2voqcAcdW`1an$ylZzD*N@NCwfmp}e8VyP$IwZPZt*-gL zhibS@3G3AknSHpHW?no!$pSw_E42yJQ0lDRgTb(#-t^#Ia zE>Ibs7ZwbJr9IW1RRfC^EQFfVvRg5+o7PM#nuosWc1Ke-jzFWWT8p$eCQBQ;CD9Tl zhT?vr81M8BT{U(Zww$@4*RRj$AnMOFk)9F?-;_TzMP~xGX=9A>3mCglYeWj$WsuiU zNG-9RE7zF$1gUDU9%95iXmhMHl@$ekaWa(EGKuE+@S9vMRJ(ZHL<5UNqzG&ILeSPQcPQVt0G1u<%snZ#+RfxNC5_a#ZkrUB z%?xOP5$)#JjE#`_iBGGZWsf{#N)0rHCx90dMT`2FjYEdR zu`Uu&rm;daO4$z)8~j6LMH?v#E)#I{z zs5<7Er7N#oLZg(I=Xsvd{m&%$&nUn|G5`w|G}#2pd3YQrG0>-^=R`JY_&%-pu#x}A zh+YjFRJZnGiqn4EeRcI}#b#10@;4T|%AAZz?0G-F5A1S+O>zHZPml=&W-X_1B<0!^ zE#AsNMnGUuEYBC_IaayCi>ZYCBwD%jolp!Rg(>{_6!PS|&gL$Hu1JOdY#u=7tr#H) z3NA(xs0}Py(t71K=N1WImneZ{RuMd94IX7EMK^wVD@88x-?0|n50D#-VqX9iqQ#l! zDa5$E{<}U)kX!$>6|2LCIRI*w-N88K_7c{cWw#l}dkq(^L_iq5U*<-{)2~WgILP7K z_&R+ek5G)t)*r%!8ZKHQk(kjdl~YpFHQcYjtIXA&#(vq*pdlp|fUzuQ>v_6m>Y~;6 zD&To@qjl#nrVluR^Y?geX0iv4@3gx3p9t{HolhNn^QF$d9~a*mRKQAegth8RSlfcr z@az+Qm5pu_U9r*(*6n;AElIu8B#K+RSt5(5bVcXNAU~t!62n+#3KywdzrJNtdzqVD z7yIE&xb_U&cQ(wcB-ZJR=rH`9Bpsu^N}q=tyR3)eP`67rnCFwBHGj~oMt72Z-~vK1 zVu%yZy+$V7nUJN+Z&HBjoF32xB8sz<*r;)!`*M*EIu%8 zc`n~x_Pu5BjKhR<1w>-K0n-2KPPFG>I9@EZ2^Av?ydwkIa;#J|=fgg($eMzR* z7;=_JQ|NZWsruzoiTeWVP(kKN9ppq4bAf7)ke|Bs*r1c5d&B9;!;+j-?=;w&her@D zMx1?W9A}feTCxkevkf4Xpt|sK=gn+>v$Kn$xi;1{E8kemsH=SYOh2+&MUN60iM2Xn~Y7jKc2U5Xo0+k%r zd5ib#1`h;~9|tkhP76AfnFkcAw+A+OPxN#DN_#Q_<115kEiIij>rv=Bclm&JH%ZWI zSS-zcu_Q^q_PVaSkf4ID!BE=!!}pNU8<+fHwXp!Pl~kZ77Qqfff2dzil)l>^sHmRh zXgjZ_?|%5ysW0oqONVkpCx4!6@z;-6aQsZJ@nN&^?|SPCP#^%M=`-E=;p|aQ<-9AK ze#te{Jz}u-C*t&W)~F?yWwoOpUft;-*@Crx2fb$9S~_VGNhwcaGp$D$jO(aEmo$>s zUNC3UQ;sP*)4axzeFOJ3L@P8srBr*ni z)Pd6O+$SR8-l^fC)>m(Pb^QiEtCWzQ_|PxXuXi>%%2(W}?>r~YtshvjMkuWiJ=0e2 zhd{s-QPjn&mG7Wai9&{pYYS!xTj72IG1q48Jif25I+%{V7bzbZthlw!*BI^Hz$J=* z2xcTSE^nSPlXWBDmo>e9sV|V4_p2dreP9HN^Zf{=BA>_c5D)npfym@NVreFH3=D?keIqZr`w&dacO7X^{_t`i|h3w&rbM?4Ygh8z_NKe+XC2=mWvusAs^1c3oaP1LRGg9fmJCCsoiM8Hk{ z?kq-GeK-B}HR#9R8u={aceaKl8e~WdqeDm{&X2cQO>l;PbxkvK{LVLri)cpue@s_@ zTX3Qa>Q;|w#^AaXbg%_CG#zj$!-svdp;_8B+BFc|(*sR=0~LF;9Nx2HTW71_@Qo|l zS_FFuWt2f8&s-L{@Kw(a0(OY1i^3#_^{ z#;{O{ZOc1lm-2h|hH5NzjoB@pkx#dw_B`#6ZjH}mEg#@@Vp<6*eE8)LcFMl`>@sxI zg1?S!4}~g%Ae0h^)=}%z zN8wo0m$eu)X6-UoiFzhERHF&73f5e{Os?)S?2Ktt_XNK8SFI;1qWqqAD2X7NG4+_? 
z`mfL8QO9mEL9b<@K8DymgiE8I+*u-}`?NEmSu{)FD=USIigZUfBpsHxzQEcK#6*qS z?|&yPmWqf8gOfHG5Z7xU#9{~a8?c_FG{er;F%yyM?amzMg8cqi~5=UZApsGcaP8&Y?H91(Mw z$c6i9TD3s65KK+ov%#w`$y~#g%mkU{G$5t#7>ZloW~Zmny6)uU?98-sLO7k5r^@MY;{$Wzz{lghuQ}X@QhpaIembKa zkmy(>5PDo?FaEjoF7#6ze)cuD^^Y16has{&kXb9pFep_&G$X(9v+Ntbp%#Ay18>Ru zY=u!tE$UhIjPfdHq2~izVH55|J5l<51`CE*7ompfhQHyf>|CDIdTnI53l%j2#N^p*b3Kscl1Y{iw>PjYJ|=C$+GBh=VZuA z#xz4fA-h;`am&g)^)!tUVl!28Y{5D)J{%D2N3mG{TdPhkF@A7 zNr?BAphkZoG#3u?dki+Bkc^*^8HzhW&_>+N#MA%=CkRz@}8}W_% z){c`*-p16tlGNq&*ysa2WJ`}aD2?PFovfb~IC-}+kt%m|WRaJ(!`emu>guNQ$j7O| z>~TdEw{j*MckNCNQc_k>tNY|j2*x`@?7GT;|DwNPjg-*~bt>jH{kxGq&A%6%B$FpQd&3vafE2R@r;eN}(8#7uAmyy}TzyHIh6KCLs;5Sq?jYFTQbzh zzp8C``r4tpy{cdk=d#iuUol@j1zchEOj5MG@zuSoVo~H*WEV_xp?QwtDeXF^n0QR z5hJ1>twUe{QwR7zPbeTH5WfuXEg)F{24iqoLe!ka^CJ+0D4>2 z7zw1DJ!mNTjPf9tRohKQKOS114nb?XNwGg^D7=Dfy0z(Mh*-D^muL^8lsV6w$1s}c z>YPb^Exscyp8=$@jjSq}G6Lqg_A_!T3tI=CY;A{)#`VwDk?1hY*emH0+^l$eJOq%{ z@Azj0W=$0;2u4X+bXc1}-zVUnK9YpLU}Bvo1x4nmbFd)^joUI*RI9D_$KU>{$g(ZP ztL=7rCkM@jO9*#j68ouN(FbHiDWfd-coEJpC5=e{;)z9zhP#9ZF;9uX`V=&|sT4cL zZw=qV>kz_z1?gdrdfE1Myp&%!XM+{qQ&IOOy?amRl&pce6rJM<5Y*Cr; zZY8FL=Q6>M(6axIO}wL);jH;apif(g_qj+NM?|jXlO)Ismcjk~5B~R9_~Dm7Y*@WD zQU!Hhn~}&g&hzdPi9;zi9Jod1`*chc8sTKaQZXPg6{h+u`FuUQrBl;_6eDhJHygdl zs_(9=)$PQ~yXS>uw;g^*9+9e%OJAkfnk9zKc}$^NBw4_0jHd0#%8WRYQ?4GR77xA(~^ z3}*F=HZ%>Snrq_|Y}}j}4b3dkIG)za?oe4@FNDomX1~6;Mc6Y(8Sj|*>-*trJl3W1 zsGXaGnz3hmR>8L^AnlfQ!`cQXD-ofZz;`^-Y_rd!%Tw(u0wt=)$C37-YIY@)Xv;5; z4?M!9hrBgT2M;;>{fm#95$n$TugUjk(3_S?0woZzG(jETU@xUiszEONrH|<*n%LR|;674!$p*ILlQhMnBQ&KiA3sBhzl^1Iz@+U$LZyjnt+fWb=E)(BYL) z7?Ld0oVcu6u}=Ts1eyD%MgO^8b_e~kzPlkV*5f@}*AHN{zo0z?0|JNQeP6+prgiIe zYcD^mRYkHEE<$c8^tTQ2n~Kb=aj(l2SOCBE3;?IEcFa-P)y2ohp0pg=JaYGu9NJj&n`G@w+dVNaqKc}$U2inV1IYR%RVG8XxLK6(lzrhn9fQT? zC!9CGkN4uJ|A&Sk%%Q^YG~0A5<|Mx?eh3A$>`h7)Tekz1-;rrc({r7XTpK0_U4Mcg zN62G8SO1^ev!sPT6{wBmS-*P3B6Kp<`9H|d6D(9`O$77xYkttm@5t4k>7;)Nb}F*h zn=;M*zrLs$toDvxI|Rc{^7!w9`5MV$s@6gCnyi!9ryJK}BciOT!eXL}bR1 zwFhM(%frGfXE1ArgbCZS7_$P} zk39=RXZ}-fn8%ATHtZF0^sA{l1*M$%qN&>@60nuxkNgWmcX}9`=(-A5F}+SF`pVFL zXSLLsox8Q=S+e-&!njj%SHjL%ty_=CMXH2}lQ@**HR^4t(=BF*<0ee0-(H=mS*BSk ziKBn9(j(1{a~tb?WogkGa*&O7E^4gTjEhsNM_LHx*xF>v?5x2#+$tt6AG^5QS$S-Y zD1iqsJ1c)FWSoMPs@-k?AzlF#@*CXe|6-cBgskZMHKMA29k-Xj>;dl+k<1G4r`ZO; zFS$hOyX$NDCB)2!wmVzYABerOQ1udjk?<>g=m)ZjOk$s~xKJNNUnr1@54(SQeep#W z`VtrRl7i^hl&9eW<40~Q{V7zylPZe#t zW}GtZ63s*RdLAlte|F7EyeNBNFm(v*r_9+mZPEFb&Ps09N+M&ET5?{Z42{8S6Y^?) 
z0f)cuKe7P#AIYNJkKJ|Kmo^`wj5mz(n~DPTIkc#P&K2r5>NkR%TzV&mI9KO(5#>aA ztR+YKF~ue#rK@E!(Drm!C7gD-#JbJ8b+Ak*S}sTi7K`SUV>!z0ACC8<)FsJX3CpmH zh!PPR#mE0U`7Z`PmU_LoBmTg+ zG3Ufa32PR;YI(#zK0H00SkIKDqE1&Z&m{WV(7a|J`v0M5NV_lN``O~UQh{m5kIUw^ z2((56zqU83UhnvFApZ?hum0!<#yLL<3OPi~x#p-L!&N`U0CXWLU1+-bHm?6e5KrB{^07#wixzbShT z#LOV>l>8y)rzZ=Wd+PuD7kb~>F4kW$$nHpW-=9=awfp=P!ll3;xR3tv4+oDtS-Ij+Om^sB z@4Vs=$ifB$Jw9^#yL5GJXHveOToPP;-V5c0nV5%On*mwEcHcZT81y2q7A@$` z1VplhAUnSKG!|R~*a=iK=8`0@?SNUk9)TX&5HY9@>Bp+Pp!Chs>!7l|b@=hOzJ{<~ zeCwe#D>WFWA@#@~3kRO&N?j+eNOC4Wb@a7e2o!P_&hQ?&wqRPh}g>$Z3%hri-?ekpg-wI_~0`Y=@ekkjuqEX9ZWMo*N<%sYY zkO!|gfFyUhj`X?o%je=74pG7byQQ$(6b9v@*HbGnc2D|Pc9pVaIGl3`>?`if3a)$$ zKp?O~ZWGGypg+e35saz7cN=;eac_GR*nkJ=X0y0x03`1?8L4$TO;nrcoz!1k%+_$lMsNUZG zsEfFYa+vmuH~fki{NtSNi26 zr;l*4dT^y9JmO&7Y(5f6>q} zGa)>ep+6elLHe8q4x8*M(-^C%{JFz>CHn39^#Cp`4IBbO*MB=P`5qU|x*PVgQl??6 zaVOZ4D`*tQDsn!qFWN~{zBGmwOS)^&A4_C2*Z{kc!sZm-n<37fQ{8x)Bp5J^L$V$i z6cu^{4w9~wy1{UX7fdy?v`iSD07SS^87}B$a1}Qzll2AbIoGc~58$GrZ6o{a`j~A9 zP@?frc4#LA^GBnisku2C!N1;vwZQxHV_%?}rAI%CfQ0Y&VBoTp(hqWqt{F&dKTGw6 zuGs6}P^6xDMr{wwalA;sG%-Q=5=b&MuAL9$g4NqaYF9X}1$*SFklIjv{jqgd81e}felRvCH9SlCWcp02g_|A$_x7LtN#*e*2Bq%z1k6zgq+R%SbEqXN`&AcfTK(YOmGbMd92PbiWS*M z+kz~>;W8vUV#1u7&xQUnm@G^u9!Up8EWv3ub9>#Cch^2XBdQp<|J6Ulg5L=7hg6d^ zloq5~{co-AYo2kmD~mw?V0DbN)R+0k{u}iRTUxUl3q4<|SUg6l0fl-gITSuH$Sk~^O zIDfL4Lp3M@9XzRM%aMH6AB44^Kzo>VV_p&6R+W+5mOT_yM@aNonLk(CAX$>f;^a=U z+?$TR^o3>`*5WW=%A`NDJWC~8O&awenW!c!DCD`iYyYIVbp_wLUiTEy($^^Vg11<* zd`Z2_O12EQ4_KF)X9db@YFjzTbwK_7sY8Z@3jovk_y=F z#-fjkc}76qxkyF9r?b$mWeq#qc1F@5X&9-LQ-4tW58gq*9mA7x-^UB2t&o{HGQye0b#J^gR)*Q8$*Qh&*1`7Zs}fGFAE z8E^cnlt<+k#Z0FO!<+KOoDs}ygIBt2<^yA=CqM9-*;j7Drzffgbnhv(%= z?n;CeYFUni40S$YM!)g}v;)a{#(oab8zs?(l*6T81@IrQL=mA_$jm-vKmB!!u{_e! zs2z69?zU2&Q0#1FUn;e0*Kal-UzT2rmhTqh>@~XALb9-qTVwG_n&PD&FN(M=9(&7} zg$C&VqD_XRC6o1(TN8R$>>JC!jXMCC z`sva?tvt#7n~U+=)%Y)k9L7RR!2}iCzgm{TWto@HenWOReLWXNdIe0Z6HV;+N`n0Y z5RT^h?t7V~%6P_HaETYrhaEHmW`EH56xFy_(z9GjaV6XW>cjGNGT)bs*a5@QqX|me zgE1dY&QD^{$H#mlZ3^megChz>l$dUoqv8OrDMG=XptagE9%9#~qN(}~Kl_b|qJk8F z2(n(<>M3$aKc=wGwY8>xt3Xks3U5-fEarz`^ya>t3VpySN)ll`CeM39z}uVGnd8eK z3^@_2yDa@l%-Mm7;_oSNL6Z>8E{%2(-Z>um5Gk5CsnGwe!T+F(u1e*Rf38bY_j%}{ z_oV3OtcHO^jcS>6#)gSr43Ix&<;ho#kF+VQweOea!}%5_H5!lC)@G^=577CG?klRC zvD!Lwd`dMJd+{Q4@j~qlGoD?0WV$vDL*h-6NmKnch4fVk8)3Ba3SbvS-wSO`A|}$X2$;)I?G>(tl5h)MDZDz?PefA z&5$$ruWg*OY;FsBZh!tGen&vqQGG#1sb{H2=HGVU5?TJNC-*60GGB&x`CFRo+(e#ch# zW3OO^R}~uW&AUG*sjQijcF0U2g3Irz=}2m2JGg>x8mku{d|nYt`Y*g7roy*F+d6I(lM z+3QDCrhU6-S#P2HPktnAOb&MCTtrX=_I3VuUl33*33Lbyh^sIpCClR*KbVMV=*p(d z6IPjA$)GxrBQ|0aOZo-^!?N3xHu|p1;d9!)S=e$j1!mF zZl9OoWv-^D?#|2RGB!jFJGtEoVB^BlOXx#wxbHxf5o+6VF_}QrMUy zw{Ez(s|FzO&Q3BbV2?CeH+;WN4LI(uYPkxR_K}H!@n2q1hw88ca03LwEluKHh5e7S zl{11}QHlMI9x}$qtbtmVUcE~fAI`gMw?V&pTRhTighe>RB7e3(JE1c;zKYeqoqa?? 
z1Qvv8Y)>9@AxH81x2fq+FZ5EqN5-G;Sg_#!8SKd>i~9abJr*`2{Svg z7X;7c8IMTXUG0m*crb_ylC(duxVW4F28FJLV**dpkJ=qIJY{q>3fekwvq-tecLm;n zUVPpSO&qc;z?bs7;}vawAd%q3oaxgqJFXREF0QPOZ=FN9q(=Yrj#N2^!Jj%r1teW- zu^ec9=6EK9U_r1m`;>wQ6s)L~!7ZIBE>aLgSiU*wwr5b5Tejz%KcCK2@)7btj$XFw zOmjT}!F8rGQtZiEJLO~ZCml95Uvvlnsbm6+7?pgOc@V*7CY*doA%kk3(Mj15YSLe7 z6SUP<7Un826>5H80R+vNFhNTsBomhhErc2tIhb&FS-vW;%dLV1saRRY;bd+m#YIg< zF;b#sZ^FP+RsoCJbn`G6Hf9t-24xgUh(4s3a*D}Vp*pBRd<2!*C9Rap`~TYL>Ngzap7zOP~KKw;VsGl zh?d`DW5ZnJh%60Wga8CBKjpP%em6tt{S_0Iu^$3K%btg~(tG`j<(|JP0%6cw5Mc)F zz;uU}8x3iW(82y$a~}7!l@_Sh?(M*3a{lQF-K9HpZKiNb_Zf~G>SeE6b~H~%^|V#C z(^F##dcVH=G!|*?wYm07;YK4oE1kpgeMh=p`3)5N8D%amhuF7^Y#;2GYx@MiS9uuASL`vFHt(OcSrWFLRJcb;dLI(s_+{G7h#nYyemTSDnI?dpnf2 z&K73CRF3|Oi)aP2qkdm`QVa&+)Y%#HAZa<0#ReAu=geD`2g_h)??q~q%mR6xE?GgG zm#q)UDX+1`#@JjtTx&kJh=S^Ev9=KK_NzQ-(I@k4rl{fJj56?l~7EUsyz^LI7zo6UoZ7>c<^96@cSc z32DO`o`jR5uqwU}=yEUFm95emI9kRT(FOKt_Lc!Yf)kR#{0KZ(_#@iz_^}xv#wt3t zUf=U4;shGkh0Kof{+Cn7ymt}bNRpTYMM_3aK}p5P#4M9V7OQMFb~$n%((kX6OP+jg z9t8>&DdtrogeMS5WD1o=XE0f84wuIl2t{IvR3;B3O0uGAx?x(j<9Y}pBryq!l#HB$ zk}5n%O(TPrj-G*$iCHF#ELPcUb>URb{wbmygPV_a7UnVQdi@x+S^ev#MKVip)try* z?^n;7ZgsgeVi$csj4wRWp-D?D1O>iV=}fb0>F{=-pTg@6*|1up@(uT9+@hFVlK^Y` z-=0c`uTqR2p8JXyyj!rgeBJt262GDyc`M^%3yZnhI34tsG|h0hG eto0caMqseOdLG;#8C$2}qx2NB2Zcf*0001K|EysE diff --git a/fonts/KaTeX_Math-Italic.woff2 b/fonts/KaTeX_Math-Italic.woff2 deleted file mode 100644 index 215c143fd7805a5c2b222bd7892a1a2b09610020..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 16440 zcmV(}K+wN;Pew8T0RR9106;hZ4gdfE0D72I17PZ00bZfh;j#meGGvz8}?&GBPzE8 z1u0OUJSyttUiBVPluy!d#s9|yDnr%+PdDJI6W~D+hF7dn3876mx~G$_T&rr^uln5x z|BT}}4pu5P3e*HEr8*eDNTG<1F_;U3ZA=tqpJ7vDW=sX5YRKxDB`FY!LZL8@ z!bX`TSd8YAvLOreMkita9aZ$fQ$*@8r}n?8&fXI{KJWmoXc#0=X$40A*07 z&SI0gyJXs?ugX_CC|r4aZcQPu+bcrpYg<7f7bmfQRh4#o+@zM{cG#5I0dklZ z)z<8ItFoM}%JCB=SLYwy?cof?1GGfhVUk-3A3vRct*O30o6@Q!dZh;dM6m*cJeAl!*4z~IaFs+R8AEDeJOU?u3$8JXqZrnAU^RPh+F zU;a;L|F-we${!dyOb=Y9sM9p#JJMP+Bki*!?>>9v2ey3PS!?%!*)SOVy?*)743p>5skoe=iCPWL7}q;e-a9(G+RBfkb0t=i8_N*+ z7_N0U_z$PkZB<@dmg^}j2qDKj_ZiD2E{-^a@MxAp%Lt*{=gz|MvAU+<%dmy%)1*fC9?>djP?CJbUm!@N*vrK|8L#IrzmL7}7602l}4y?RTD= z#6!3r^5Ona!>#G)S?_S5-S-FtfgimK1aUrG4ns(0(V#gV^>8f@hf-*su5ukNasBL{ zgLBnfJJ-)mbL-qOFU-SP%s=z%Z}%T+!EL+v^$fiHW#`{Ax6jd+tNxX~{?eYf=hVw& z*ze|P61T)(fBbU{mHHA0(6Y^eWse?T|L=*6X=cUCf~xpmtLO*gUB^F|M`rr)E7$la zYxyr@6;nP4W&KEj4@Z#n<^!?2U!Fz#w7-M}Qudi}#A94=>6^>8v6fIfp`dgF1SA+- zMF4VjQ4xk%u@w=Z-$gdg7Oo{tgy1r~(YkCMCt-eDf?_AkHuglXmjgUD#^7k|E?Zl6 zkZ!J=UPd7!ZlHBtFii+0{EiNC9rFul2FYaUfe(V*>Gog^dhqy^X!7cW12aOln0e=^P-ZnLl#qLQ#7r3Oe7La2?Ib8(RC|iNyUMS;^To8AQ{T z4^tCo>P4-BzB>=fh~tcCt%`h5z;b}#Yy-Zdz@325a-~Rkv>^Ddl0S?_08q|KIX^0OZw63{Y=(8w7`DXu%dD_IbvOuaQ?^`5OLLx#r+`^All1Y!=LjPHjjUZ(Dp zgb7ocC>oFviPYQ>j!@}K=0=hMsT*Cg4dezXeE8-w`qSwRN;+9?E?sYJ`43;$fw6Ih zzAS?bQSEn^qFn^dk<1!d3Wv+G4HA10Yzc_OqH+@RM8l1DK31X;b2wyuXhR zQ$_;t*sEriPL;ZwM)xrDbaUh%C|B)G(mnu_BH$_g4aF$bJ1Kz;K_)cX+JxkLknZYU z%33bcO~&7q5}?6^MU_wxdH5NXqCVeLEbGxA;Jj24@f-8^&8W-CBQPQ@0WaYb~#;VhN_MJ#uc(}5AUf}sGTxH({yT2-N)^ckVs~?s*D4EA`<36A9?my(<&%KFXS=y6E6kPfC?XXLmg{T zFtS6Eaa=Re2s!@Rii8aHK?7z=loVctoP2p+MDnl=(KHqo5~0q=XC^{7EhQ&6te~HonR+x@XP0i2l2`P<@ytx$c>rN-x34+~ zR%E~3A+Rzh&YDX55K~!?3^CSpSsjj?s?6T9AAMSvV1{=l-79 z{}z*4CT8?u_Qj2?8Cx~;R>?wiBUDjJBV#o_&s&*0oJxtnI*)zzhXeFBcTcm2l?-2< z_+mXW1WEx*q0s=AUqIt^LGqEH#yFXqWMLDgb->0#rWpeSjXfq9faDQCFvV1*fjD6q zQjk3Y2(~cIwqh58iq>J<41o3{RRMA{Q9PIdCa{>u!X%cG17nye)Pi~hpvIzfAjX(M z3d|z_Ef#H)!_bU_C+ON1@GNLwkS<3xH}pAvV^Q*jVaTi34`d0r~=9C_s+eq2XkOvlT8@xH<~90$?vd9{SMnw8qOC zZ)2VMApk67^bso@_8zpx 
zArSWtf~+r)b_qNT{^^)v76(T-|u$UBFvrSsBt2{0N{Ly7_xkhen+2Q4Ml ze^vrI#GK{-EgR@=LVZCfTfvJKQ^j_QDQ1I$3YLz!)GI~?ZVVUg$gF8qs)DLwt$0k4 z?(1mW_`hynk;0)a57G`Y?q%x4W#WpV(uo8^rcWpi9?|_k5CRA;b|%z6T|B*l@uL*Bc|%h3CCoWzZ^Q*TB0sB% zJOro3q>vb~wA>$umX)$Q$AWX`zCRO&GdTu&(SER{E%P_9$Th8MBq!40 z+~t)sk{fPo2}J1;@{@xoTh1q%JsN>(7A;r;qv0DPByIIGHs?#gtVqf9kR5V^C~Ud@ zOImuX_*~ekSJbb{6>_S!N95r!OQb$Rt3!5Lat_hl4iwUl74^$GmqFuTv1^egSAEy{ z%=j5~=PXV6;6VG$!;zun!Usn2iGweZxSnXggVhbjVS6_ z#0X?d+Bj(29rr)W(@e_{u#l)s;G)LrDpr<^3@QQ!8R|_AW;ma%)eXl|G%u-NC6NAd z8d*K-k)j&ZaOYd}tU)2xy8j+CNWDmlA1#;0?^Fb!=&2+ZkC0LI zDg}%AWtUFk%d~)=x$}^G|NYjM-pk!qllU5~)H)-I=Is<+XJ)2vJ|?L`J_1MX z<(e>u=3U>Hv)@LRZESS|`7 zfz@jFVsoQZewlL+ef(?kJue}or^cKYuW_JR1finE-WMo+G`v|JzmTr}C7b2q@o z5b5^)@?z4NErhiTZbbLD$LbWK+b@&_a)$}lyP_idId<$qkb|5s5cZ2luS?lVKW<%m z9_ANFkG>e4w+TtI&L+A+SGz@BihmBnBJVbST)Er|(BQ-2z<#>ockEw6B2lW31cf?+ za)W*x8D|uG`sQbw#nOs%n`YaYPTP(g@Vs!)=~VGU3vFbw;0*WXzdM^Zlx;V4LTVja z!KCd1jaucrxkKl6UDJkSZMFnsx7rkVy^hCKLQG%1OPwUyd#bE%o1aGYQOE?F{g6QUrme= zF|ud}g2WT%(49R94K5as&Q^K)h-;!*qOVM`X;2u?8!ZPH19sSScYSDth>q#MPd%upS3ky=sk`Mh z(XE5vXzB=QiF0$ebkV#h+T}984i+~<6kQ-TQNAn?5jT+0yQJ`7pzUDIf`6>U#Gs#; zdHgenRu`dES~}{Un~AV#*;zRV18GR++48X!{5$1<*HH0dg?fq5yUFN zEw8`qbr?jyrCH$h-FRw|;Fl&Pw)OH=GGaEP5aoQLF>e&2ILOKcT z(hy~gs5vhNCLwHE()|0#>C+)_De(H+unPP4xt*BsFuY`qx=Iut?s znr-m(WXL|Z1>6FXUMbW$Y&sCsi{a+{+Tjb4HoN2iBgv<%`G7t}Y)^wyF_v1@EQp&5 zOLKuZh#sVVEH6{mmJ=Xv`V|oKY8vWzJZD{W9ulS`vNhv_3XB<(vLuBtZ}h33I21`Q zCaEZt%tJx(A(A0fJW(xNs8GV;G!{Qk9<`Xu^%w0dPh}v@Ma!XXjY&{MJjf_rj%uaC zi{Fd=vSoe^@~CkwhD4Ye_Z-G|`K+`FPFMteMyt9bckuE?RuF3~wMTL#)C?FXcv~gA zF8i#Ue{YCirT520k0nxN7hb?HmN|D;b_)r|Czx&phZdH$FzD-Z8K*WDiUZMG!`faczN6~&{m7t8lrk%|--?}Qgh>V=szV>owavKfyPifC$A4d$ zx7)eisC7Sa1*rYy--;Qvol)Bd4yDfcw!I0J?efIcCSsy`7c_7WciGYGFk&V&`$C#N z7_QrU@H9+5q<+>gRtACNpx^lyU&}(m7Zij|(W=@P1%`{;Gp0}3!3Ry}nw*YloTs#= zTf$wR`m-w>Psg}P!qEsRRgx>tF(7Zb)yfVn3Q38i3Z>Fz9U%QzskIDyF`#0|20i_l zwCuIZXO+AJC%tN}T&1!U=DsaQPYZN>nm!E*3{Pj}0(Y_%uT#-s70BQybMU~VDKY(k zHUpd2CzOdL5vgs&Ytq-x8;|TS>PcM@j-M2%4NOwfdj1F*f2sRh%rKC1b#LARMUe|qO{0Ko0%pSdyaP+VLaRN$o4Rp%rxpD zV!2SihPX6ms*6cB?*^sR=_6ArTnKcCUE`6hF0KdKy5kgUDOYAU9>ybrC7PB9#pymL zLRZl25A~le+WwG)P_B$y5?Oz4d`!B#={?kK#tzON3 z`#U;z(qhxkzF52{VRlVZFPkHy@`9s*n6If)l877^(=RQ?ipF-5^4pe~iOX@LftspK zbryT`dvOY}?$yRyAp?+EsV!=MsC{9>51ymzdQ*Omh*ub2@|r=4bsGDrhCzrs*+>wX zcM17|Z|KvCWlUQ`{mfyri+1DXou6s>9j+cz~ zMa~0wKs#E%zON(HRv6zu+J1ax-We$*0~;m62R)lqoQIQzf(QRy+U-~U)a4tI9Ps>kz+<=}^&)1G!1oRR8< zn6ae@RStqfA!6G37}ru*@_EOcg-1cPQWhb4;S zw%2djKG#&Y&ZuRqOp95e*|ilq)s8s^XUHRy44>mPOP93G>yS=K#W}5uqRVifCQZAK zmER%#J)FOxmE5>Xj^pp~Q^&7-OC$%4+G*_B6J~fEE5~4U%vJrDu@EktD~S1-N+^FI`FG_?ou%=4qI#SNw?Z1{VVe1GkimR2V+hh zuiszKUo1T!nwg3a?T)|+t1bw~*_3mqTB2d8oX{%(Vc+p$yeom+9+DVaw^L`bK#m*< zXb!k=Qz=-Hv9s;iSw2Dw7FM;K#Pc2Q%qqjcq~{zKy#;xfiw&)d9nnlJqQ&#o8>7%+ zUNT7ILRhDQaN+GUw5@Z2<>|TZ^1&OeJAxC|KH8(EjMIFJJrCprKKdY)kqzSL35*Ao zVKh3lilu&5f2VKj3Y4;BGBG8Ck|LZ$LS8i0VIo}`{3QBslpK8a;bw&s&%lgBS)_9K zVj$3R=sn$j%%*#8<55~DPd6OQk(gc8@g3gcoLOnmn_Df*OH2(nrF^n!jfgylc7+vM z29y7DdzM5D&@C>85kqp=%xF2-T4E0}vqLDo#E~G@(R2|7G#6H0)SapI$P^0;l&MQo+Y*r>QQ=#QX2|-7F+A=@ zF7-U?7%>WYq+5Md!m($K#_R-z>EPT!_9P$;Zb4u)jA{^iCLG71TiNpY-X|aoB$qXC znhp(;Ezub^isg5v7^F#mto5Rt``6Mjk$zofjn*1v2E17d&1`fm~T2WN=cVm%p0W>tELR)N-Z_ZYMFtr>)NL7U5Kw^3VC{Er45QaR@SGJp>Dd%GTBy^>(?x-EAin4aRXH z9RTfn)aG!^yFAB*6eK_3dOI!H$w)>rkSa&izm5-=@CNOSD^M*ek76#FFbVn%O=v_X z2*~EFIc8#_->|KG_hIclq>~bir=S9Iy)(xX&JVIWiS^7B=|3f<4CL+n0!q)XU-_S7 zS~=XsF)e`1O@a0UX96@){B}`LM-h|rVsRhyp zGSzLSAmCMU0a}H8BKd~8W=UwXQ97!5z14mTjk+S#e z8=%Tw8Uz#}Hs=UiKQ3purP89cEV5hWv$;h0R2urWkB05;EXMM5{{s3tYipv1LD5_n 
diff --git a/fonts/KaTeX_SansSerif-Italic.woff2 b/fonts/KaTeX_SansSerif-Italic.woff2
deleted file mode 100644
index 349c06dc609f896392fd5bc8b364d3bc3efc9330..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 12028

diff --git a/fonts/KaTeX_Script-Regular.woff2 b/fonts/KaTeX_Script-Regular.woff2
deleted file mode 100644
index b3048fc115681ee6c1bc86b0aa158cfbbf59daa3..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 9644

diff --git a/fonts/KaTeX_Size1-Regular.woff2 b/fonts/KaTeX_Size1-Regular.woff2
deleted file mode 100644
index c5a8462fbfe2c39a7c1857b9e296e62500a8a8a5..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 5468

diff --git a/fonts/KaTeX_Size4-Regular.woff2 b/fonts/KaTeX_Size4-Regular.woff2
deleted file mode 100644
index 680c13085076a2f6c5a7e695935ec3f21cddb65f..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 4928

zD~cmWvAidEUC%;SQi0Ld^wU3fz%$)@NiLDQ*&$jFlp=!3Ole9*$N{`e$ybU9s+a~>;}{~0sL_;aOA|qz zrc_@EqG-^R8cP#Flcr`fH!^EMX06Dq4cYWVamd=mlBSj-f@w_EbpMdF8A#epByARw zHXBKsgQU$x(&ix*%}2#fCe;FC46dRtM7g;r`K|@r-4~-0C@paiZK;FMvLx)1W4XSI z75c6f7=oKRK;Y6lQLe9qn^q;Eq{V8z#2URsP%jbEORUvPtkX-ZFW$fgy@8E-1Do^) zHXD|-#X%_SApS1=l|JDHz%LGL2XpqQ-uaYl8KI5lGD(wGylEd~2na|tnhuRpR-DET zzyy$A0r~+NrC5}qm=sxe5g=h%Hg2FC#P19B*GOi&f$zwn}2eKu{6Q7bkzy z)JsCupH6=#(;`I>RNnoFuJyg|i}*K93+{l-T%D*DSHE*8i)Z2f#6;-Z0_#py;1c63GI_2rbwXYf8YC^L=%vS z)EQ7jb8m0e!IO0#^rO4Yp2K1GS^D~__tk%RYQQ_dBAF0WT(}3*-u3Q3Ui02@>$`_{ zZ#$l%B=_|A4xFeBqNiU3N9cuu2qL)YFOO~;Z!Hb>J(L`YAgGIeu~;9W(70jCANq8_>tL6P9w|yq>8^&hrS^5;J4uJ%|No#+Dlal(3jU|;6~m#=@MT?zppA^tw6(W;r()=m$Avwkr zk@(?yuyf~n9j0!RKg)5K1DWq!W_)qZzO6alp+)?}WlMO^&_eEZxAr;Xd<=W(>6acY z?fo(CPfbFNNdc)_^nKCw(TdBrW&* zZHOirvt{1rfS?@owKAqk`_hjv98f9#Bs-TBXs?=7tFQ1Sef1h8!R`90JU8x&c zDM4!=i&yQG8XKEN>7ENU=pp26j2$j>+OHc^S9BOgSIN+!Y>w=(SF zgbUf*rR#Oq$MM1B+J2jQ_aDKx#VQ*!P`9?8mX|o;+4*v)aDTmisH%Tu|Nd)C+>0}m zTA6={7ZP47bf%ePYS5g9f%$WmlrzFR{nfDn==@qI4=+^_6`w&2m!(qyFit_LQWz4K zCslgSd12M>h95?MKiYUzuYp$hw&L4z{yxCBZnai#{lzGs07L($gOp7gN+OE>>IdKA zb-*z{jKykWY^)mR&GU)~TpcVwJiMY=SNyl2W4;4`mB~k*?4RXE&8;!qIqo6=0Tj)7 zE@q>SI}MeH{v<|5Zsb^S0}fjeWIWxl>1TNB8aRPfkVp0Smm)t$qQ79RHP=D2xzo{G zwmwkcMosfg%y*biN8%q#TDov)tI@<;!-`3uMvrYv(8`{iNsUEdKv*?^lYprrvwa8{ZKn3Pw0RmWrnV60lSOOc;;72-a zaKH{b3NV#D$%gA4YcpO>~>s$<@ZpL6q=vX;GS~C$Yi8wqCzH! zG{6WidX<5<%|6#6rJq*JR?wx5^HvV$iY~>lXhy+F^p8wQl}5!JVS^_UHzRu>namZ+ z^iM%x70W6!lBGb=`f(NAF;Y>~8qex2_rx)Qd@;~uJ`hC!C>^R~`4B@vsuVvIJX0#k zpocSV0cK=|iO)n}#-J)J&co63=RnM?GV7|MdzwaB|oq zZ87}%ab7O*a;O!Q9A9cXmmBYE(ap5f95`NNRSbOQk21kCbSTW_wYVp z2#Yg>BRo+NfvAb7S~_p0-Cl0*@7!s3sF%!~(?0cIq=^x7MC8vM1&(sfT{Ulb{^<%Z z_CdTjx#liHw%1-KKR67z;4Y}#cL5pmw#5}60w8VqI0*w(-jzf)dupz`HrJS`ou#Ee uC}16e%Gv>UrEUWr7J?uwp6K^lh(`Fkpv`!YL^q7xb{CaG@8Q~cR8|6Fhs$~Z diff --git a/fonts/KaTeX_Size4-Regular.woff2 b/fonts/KaTeX_Size4-Regular.woff2 deleted file mode 100644 index 680c13085076a2f6c5a7e695935ec3f21cddb65f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 4928 zcmV-G6Tj?tPew8T0RR91024p}4gdfE04S^g021l|0RR9100000000000000000000 z00006U;u$M2o4FH3=s$lpb(dJ0X7081A$BnXaEEt1&II$gDMPx92+(xvye7xR)-Qr zuyF+D<2}WHmB5qvtDzk>iwqJnL*(fZs|06Yb!k(1`ETc1I4-BI5fi@^u8fdm)_=e` zdp}9j)YFz0DG~@_Kr>cMHY70C!K^ZDLNTA1b7Br>uDhMiy#E2l3s-l)|7lD20$2hm z@RXnGF4_PYHl#gB*k&mx`PNs|E@~BRiaIk-Yp%L*)p~xqH)tK24LDPq+9^`k`Cgg@ z?wr3yPQ)iMi`0C({fo<{L5l+`f3Eib=1O^!+?5mxbFzfbmnAs&^Jiy+y`4!4(_Cp% zqD;z%tlFv-x2E;!;w zVW0LxIo!N76;gG%@Hb~*66P0cigm@!%!Cno$kKtF{J6eOf$5?ZhZ zGxUV~z5L(+ewzJn*7bz*N{9T6&S$7sY0!Etm|_zlZIG>ifQcfRwh5_SQlHslg9^@7tlD^wLmOxkR|-Rl>&iBW8}oeXg=l3PGl0WW7UOHQ$AH=-*sQ_FPT5-1d5EJQD9Pn$NP z=&ex`C2L6`ubBa-+$U+ol!uAv{MKA*F%G6?$zgGfC`t3*GI6_Eb;)%5MJ*?0ruoG$O;U?7n^){QDYAVGaEVAHLqZB9$dHf<2?`{n zLBa$`NQZCJlm;XxcSy38uj#vUF*`Hs$Te_xywo5!OD#vP&QtM_|MGmbfNp9M$0RSK=0_8_ zABCw>{ZyuM9=Qack^&VKMj|Ak)m~&+sFoKh!y*qw(#BI)DONKBw}KKQLVnAX zG1&USa_<#$+$JX-mDDDeb~MggE1*$BlEb77LoKF}k$@k0xv!=(a9U`DIxRMzDx4M- zby_$y8F)ug0CH(Ej8jTz)P`gfLQ@?uVB-n6GIj$~)F}})=^B$un~SNqEM_044HB;N zhGmM31%SFVDb>`A0h1#dQO?j~Y^-I)6a-yTPH)gB2)PoKXk{Nguv@^n30~1Uz4`%@ zD`m4i&uZq$jbBlIr!`;~fTB|CWScMarV3S1Y6Ge}8#%>J_FVVI{x3$o9E61rv-C=)ljThD#+}}^zAw|gQO7_rj>e?#e`;j4(=L3iD8l>nvKp>+j@jEgyUwZEikoU zHWST>2naBxf=JYIC;){c0_HLu-=J;+&@vhwQB#6|W=GUg1Q6yqqWK8|7C1^ROpF?C z4J(R71hg?xdm%6l9Zb|25zxhC-Rw}!J;^ooCJ5+rWc?5T1CD4gLBNosqr+-OSs87_ zHo}VL7ojq>IQPjFsy3FWnUJ(p$So71-$xwI z?-zDt94hM6EP-*1I$K5)wa*E%kwg-TMNvt2=HcQl{g&m$ZUSxtJ5FpQZ$aTfFJ)Q^ zKqdy3I8BgEQ0@SJBhqaonQ$$rn0XLeCP8yU{np*|Vs>g`NUiHm1r*-6C^Ak@npARd 
z+~sMJ@odvPOygYR7IQ1sqae%e#;7iVVvO(o1Ck$0* zFd;Bmk#K2Cdlr&B;k#c9JTX4=Tb+%hn~s0mmbsT+pj5fN?boKS1uqw}iVm{fn@Pzy zlBeJ}FNK{1rNjm{l2+_Gjs>rRH35$8i)y?pjmO2P18mc2)B)8;a&4%GCor|!ue2l0 z@X11NoM#Ltr=3&ntIU+uA7Q!Dp}Y!^&Ni{D-6snT!|DB3i!jgBoFj`Q*i^tK&VyE& zvw)M1orI5?t@f#>&HD zak^D@rlVy+5kEoOn_MXLu0H+IQn&56%Sqs?@mfCVarak6{Uy;q{3a2bl}wz`wDWW2 zFe_eM+Gu$l-T;AwdpZ%+8c>Xjj9L02w!{{t3%dFTa16K4; zIWgrd&P@RPxY}Dr-k_JC=$4!E7KBmC2$MP#w->H5!6_>Pr9I@t|HRTurr;U-+c_17 zle`RDGL=Dw*u?=Af_22JyfNP9Y9`_6ee?*coA&SST${*$%I)9i# z>QCny1#6hw;;UEI`#w-TSOu)Bv#Nl9%?K)BC3UGOY|qXa&%vaQ&-k$DKw$9Uzn^>N z;eYm}h<1CJ|M-dDT8kDhn~;uxfl>{O`#pnGusBQTSLWLp4DhWwVxo*Jch`sW+*@`` z_ak7SJRpZ@zrTH5oMa}J_!{pz=N{2)H*N16;-^2s^hBQjFPN0S{9v~~X*yzY_B#zO zZ`@+Co5ek=JsDu`K7U@w>p@27n{aZ>nzEX1pWoc#*^kkriEAA7%^NB*>>W^ey;Zpi zK!h)^cg;i*qx(Fqr!ofnW(o(Jlf!m9yX8!vY0LMzT4C!J!MLHRZ~Cm6X}7Ig@)HLQ zN4^)s3V-w0A8ldnFz_#kX$F&6{MfvW3#FaG49`9U;jg#Mja*)<+B@LVi8>dBl55q- z<(9ei@FTF_lM#&RYYcTxSBh`d_^9v-bF)Asgvwz@xrQ-KuWBg<$S|DWP7O|s(zdQE(#);lqcVpr9 zSKNgW-))N`jHq|DB)ATJ8H}+79&pVt6y$wTZJe&42aC)hH};_9m($#@|E1)$CS3N4 z`O|W9wY%3hVY)?s53f)8=JJ$umzkl$!eV3YQ)MfaYwE79zY^UoH*1k01Af^b>H%ZG z^-DO;E}HCzW9!w$_j~-7$l*4@;Rv(b4R1>?|7ShTT$e0)e4>665*$kjchBvGYlW zVFf{88Rp5xs_ysr^`=9=Fi?M47nbk1E?9R>W>`1R@MHqzN_m-wSvrhkCVj<4pSw2P z9)=TJ^AcaxXRvNtuJ_T1AAF?ccXZ%oE_l%9(r`;hs!%jQG?KAQ^?y|NMm0=%m zDp3wQk=5Rfussmr&7R<7&lQCop?gBz@77;ie_dPVir%j-KZ3*88_esm=dk1WcPGAg zto?*Wm=AMA!|Wqb!MEldKGJdgGeJxdqsAN-1>yD|6?!3WhqDhm>PHM>j@5nhx#9SC zj^p2-XK{?-drRD44zlS_--hSvOCM?YJ?{7N{K3&Z!TxDjURSqu!?e!HYXw&1>@L0Z zZ=-jKj*UzCrvgQ_uG{h>He8n&ugf-VTVA_iTHV%la@cN*S^%7Rg7*2Tf+kR*!tk*_@q85UwF!pw(p|nk`ns4bNmF3u!6WrJ!9# zT^44B(E|fR(rr2R^(;aba*?6@{ZjXVY_1F|9y?hWL?q1gppPxAM3zE_WC}8Bbh)$x z{n%R~yGzrnT4THQvNK6vTcWBi$4ecM>e*PrOhhnvRW%Hq7FP?Yee05N4RUnp3c%t4 z38w?h+SS7nbYPivurP_2byCduQ6FY!VI<&E`djO1pk75!^k?zAa`GJs5iIxC+f{{a z7`Rzd#v*CwDlx~hw-hBXRw<4;5_Hl%w*>9g(~%NK%i=IJp!MrN39~R2^?_pyOs5yO z6ge2o{ae&O0u#(|U<%4nfdyzK24CVUVu`~Yq$8g6B#?oOWFj+J$VxU6$xaS(QWUw! 
zP0CfBZ=4xqAJKL2sICSTTqTeI diff --git a/fonts/KaTeX_Typewriter-Regular.woff2 b/fonts/KaTeX_Typewriter-Regular.woff2 deleted file mode 100644 index 771f1af705f5cef5f578b3a1e7d8eff66f9b76b0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 13568 zcmV+bHUG+YPew8T0RR9105t#r4gdfE0Bjrp05qlm0RR9100000000000000000000 z00006U;u$!2wDl83=s$lfunT&H~}^SBm;sB3xWm!1Rw>8CN9CW*AUKZRb(vF-L12^-sRp4kkW?yS(-j4&mT7M`-Mm+~H|D|J~(s zx%geq;*D1(>ArFW~rrE6envo%`l% zO&%1KVbFMCgu$9D>Vhor_p7zu_xgZnQd6^Hr;Yl38vs1CA)z7xl?8(x!jsR-@WGX-^qjEyCu_uh7 z*I^gY?D-X??S9Ph4`*u;DbmS24lMp0i)^I~rpgtodMf)%0pM!zD=q+k>MsCRbH@(- z*djQscm())^5fs_Q}OsZfs<}Ca@=XAhI-RiE3ozs0|0$%4*;FwG9?G4Rt|A9A}!%eLthL~ z5hhYIlz9=7#fhLTpzK79Hts?j8WWCQfh6zi7&fdo>H*Dy^`wGqe+Zaua-BoP^#*kY z3z_^znGb}NHKj3Pq9&3}l9gHI(a{W=QeL@bkbp*+=_Htdm(o$X9YqGJ01gn@2p|*y z0zI{2&_qe=)m}fd*%BKaA=oLEO*l8gqOn_# zPoOow3G4Z`O&=u8PbWhJ6^9~s9Uvh}A{)1{B_X$fDlVHsH-j^5HaNj%bZ6Q!;-^Gl z@?y|!gCyYAg>S@lK9Oa$%UVw{mh~uOoA__b- z6Qm;q`)u5Tut+)VDp`kkf-+s%4T>DP@&Mu^AIgYq-U=%_>xi*s5^~9uDv;S;Q1m`XrT zUKx2RO&Bu;GwG|9CQf0Q^!16R(*mvNZ8Mo$umL-4#15OV!)ENT1v?y# z9ge{c$6|-$uuCQ>RcVWovm@ji>M+YXk%gtmk}~&QV^t(aB&QBGB^nT=E~i<3zZWmZ z3(a6 zn$mj_ystCK!Iic{wgNEU*eQa98yRh@2y8{6%}jEVC#rHLtU^u=m7s%xdaoh~;lfhY zc_TE4yXZ`VBp0XR%WbQ`C>zym?nl~OTeK{eJoRH!1;pZ*!L>9dg^MJEES9^1it)tc z=`G=Ynl%i8^*?UOFQRJ)BQ=Z}WGnSRRR=aIBx7ZC(wAzvO zD6-?cnO;Rs%(?|KZAa$J30Xj`gw=<9QNU!Wk>GD9h-Nhau@L^+B=dhxp&yyn@<-O}{5 zE5*kHssQR=MuxChqR4tt=>lLfj@8u9Y0O-irgcmcXAYHX4Zzuq3Wg{s5D;SKDqIf#!G+&Gn$%yuHMM`PzX6+JO=6 zz(T`lkq6OY56Ufpl6)-H`2b#~^RZm#W7IHO;X$<0gO*n%>=%B1`{CoLYCi&Ve04^> zN?%{^jvvRm#yO^n;SbrjB!&SXP*3XQFH#LP+;ad>%>ZaGr#2M8Il(O4_Md}`1B${N z-~vY}DarsO*_nHD?kZ#;jShxR0XaWF2-x}U&vQCcwd4Frw7gBEB9iQtl!^qTgpx@E zxJ0Fo>eDGP5k<>lazl2sG?hw75J(dlkw_$0@Wi*OsOd) z<;h}WbWmEG?f29*1e`jG)nnRhNxZ}wEsGW8dW4iuq!A`n85;6gNung4NDbv=rnk4( z_?&`5lb8?_5@CWSNw1Jnqz01+O@%gvlvJ3!@j8{);i!;GTAH*fCRxZ8B0EJGxDkCR zuH(ssrD<->mdv;jZU)8?Cn2tv#FRr{Rtw9-MP#yS#O8yXIv>O_R0#w+uR|0Rj(&T> zeJ4$=5U6IbCfYkh10Xvefi$Mz)$xvVQTs$8DI-oYVT!v3=Gv@&v?9tdulZMlFHSQ% zwUGgRMEXf!_YI8z%St;C1VAvHmZ`6r?x{Jj3xxh?bMI zLt79$Y|&_S#X<4jUp2)QmJ{)8sD0tpBi$=WsXa}-&L|?js#Zgs6pAON4`IY#lIlrW zmTQ54S=XP#5FBzsvZW3@T<4R+rDtHpb5k)Pa;N;%uV=KuS?|6 z^i1#RRV-+FB%2;#K00n^4BMito@X{Rebt~&fY_3z+qWQYv$qZd?3Aq9m0#{w&7X?G zbfeW|jzTxXH_*Tq>C|;8UB{viS47ym=GyGh$`~TiAB31FaGf3}5b;Kd?rh1RPz8k> z)8{InUV2()n@t9K1WM#eaV96(b{V6H=2Ymed9yuzJz~nCo~JuWnxypK>3-ioHKk(2 z9x}kj0sLRdCWSLDdIo#L?c^$bIdf{eFhq=Jg$hQ9n^j4sLHjn18LwQf3z2C>>DltQ91-pXfi zjPe)p*t7t|uVXEE8d)1Ns$GA?wLE&Ylwd`;!xpRe>;{i!yxx7g%Bil&OS%owo|yMJf)CgRgbF%6aG@`kjCX{ZWw~H4 zxT$E=PdMKt#G_ZE)?mtr4Tp~;+x}3B!>-*s8hmyLL{75bc{ej0BcVSX{q+svv#xha z-t*lF)}DwMt{K~~auT|#?7n4*dGHoucJiC7+{^`7NwaDe>{u}eOB)1vgW|v=*t5Iu zGVpC!q4@QF1^wu9qTk4kTz&hpwH!L^6*D|m*WbU8jB%5bq4wyJVrOwM!o*ik1a^lGY}TY*E7$)Hpj6c(POo|?!PdkhQa zKYD)m z)$q&rA#NQQzPC_FMZ+jQcKfg$Lr=pyXrO+@)2}GFqb%vZbBN1J0lLc*6I%mt!bQFi z8=zx_#){UwFOzpPQY;t#(115RPD-M%WTeYHphHbu8Cwj27^zVQwFW%Y_f}JVuj$#$^@%6vJ3V@FAy(l}<#PD+lW71S{lKw+lLaE6h4N5dsYTLP?krv*Bd4hl9`=Vp;B z71E1lm4q~52G|=#UwPX`1J#7Zyi`>J9!los7cl71fg~|NH@=jRG^X!KgKCtVNS#x( zO-U|#`_%|Ev{9dhFn}|Y(;HjdysA^6U)omF?&^9jNc)6tuPUs)oE!EmfXGW8p)prT zpB^pPmn2i6?m!UOW(ijn1=Q0cfI1Lnavm-ORV%;)CV*AI{4vB(ut6;(WjiB{xXlGY z+oDFzKv11HX;1&Sl{V@`g?GnZ&s67rGK*=*D*fd%sB9KoJ|5b!58`n3(n9-2)gW3c z6A{n*ynO_sZCI`Oq!~7g@`rr*i+&d%qoMXrE1m6%c(+h)4AQFa4_gFDCg;vYasE+X zH4}tZk?$I7U~uuAvxaC9^?bg)lj*d>RdO66bL?EcZg;rhD3Jc}Y%aiddGVCH7`0Y_ zp79O>JdaMKD>FX?W-;G4mX)@O*Txbavf)&rt0CeG*^B$j$8I+(h<9d$)qPzol}yI$ z5tL%j{RoY~LZnL4Lpl>9z5thU%b)Y~h(3+LQG%B{C8)CNLy3%pY6F5S(TmlH@CM4; 
zo;&h+&~MP---F~o-IPc=vAKrIT$y=}j@AbJln&iZ&KuuvS0m=$lv2D@g$mw>Q95d+ zP(ei}KRM2k?Jnx7Ky3dDSD&>bKLACf*v>L%rs0IOt{IuAV9Wl82qX0Ft&9zo%WmO#X9X)@LOyJ z0uv67m&>@XujfPv7M{eJK>QJ>;<+^I_ru}=i$*|by3GPj6#}cKQu9m#D5DqdxgUA6 zE<>I)ck>-dr3u(r8qqz|_`iY;k})m1uu>!wY47Jl0E`!vzc8tn{^mu{Y2|d(TI=4` z;QnBlvYFhv)eTW)WU5aysv^W+tt%G<&!vbtMQTCLsD&-SQOIw?S=L=zybq(99>_&k zR3i?(1TG^lP#I0%Pm)EKt6X-gY8-%|GAZj2h1+Yu%WA0Qu)VXal%&x?d3H7B5fEst zc=@(18SOa{nj&-r0YkZ$YSMA>G?GvE6Bc)VHVjPBNw;Li?}M}l$CY?W3D^`|pdG=jFB|2Gx5GDDse``9o{6}tPd4*Zb6so!Z$ z{>q(|MU~gfn&$3l=tbQW-wNf894!R*$zJ^om+tN(Ik3&Jo*vJJ zRlhh6Gl9!KqoLAE>*1Ipj@$SplvO$g)T_{_74YLqEpry2q?N?|h{P`Q9{lbtsOx&T znWvIXc!Ye~U%Z?>>Xul|B#)CwWr%u(Fj==58#MQ!*3RuB0p%aKk z%NGW`Im2PO!J}ZhVc4E0qgGwR z=tcUJPy=7;KL#tRW5jp@3F8>m#Bd_R%6K(EX#6ubv{)9<{%p&dJR7diKe6jeEhbkv z3J~mKs>g+~yqEOcOa7UJ&W+=nVIU7-rXi+J7Ll|)9WkAHT zD3V33(M;v@ktQ*yD>K#Vz^g?Y)PPHy2yA4*7`98L!Jbie&E}UKv7TV%&>qB|X4%Me?xUUl=>zE`0cQT_Qw-(bOpL*!;i`%=Y>-PR*(^R+sQe{U-xQvaeY? zlNy|FW320hn66!Nx<6?j8K5)51PHASPYy+`sJv}{3u)*qfM~1Ejc3WGq}W$Bv<^vo zohsqlaxbJB(+Qw~&d18nnhn|SxHlX2g@$r_! zjHggV#BdlCaA15Cf)mD9G0I3VIoXlQ_fd-y7Uf7K)3|VIim-J9Ew-!LVO8qjkb>Hx zGfb`=p8z_DDt#KoMHEAS3`v3k>LhMflGFZnLn*1^oXlWEdmc_ntu^jRgIzhPdQZu` z%Tkxqfgson8aLEaafQ_h{?HMpNT)Ka7^1aZLiG+Jx;?LYFopS)!S6;ax+^=Dy!%&L zX<}tnn(j3I=&nX(UZ~a$ts@?rQ0Q52^Zqf$EgjJbpQ7mLLW0P ze0hn@Qk1E~)ZUrJNk;#JHjz4IW~3wqEe%G-Sx?FX)TxX?VHe zmjl+qXqp21Pa3}dN5UEk=jl!4&^nyKkfPY;fmjPjoG9Y4MJxL zRyH&5l8Q>TKW?BS|2uTr>@zC`+GweM*Fg_z{IU9Epx^5ETjOz>U{;=4*r3|k8s8CD z7h8q?!PB*CG$M=;2{{}Hf{%!88&UiT8U4L2oC^4d)_e>7K*=IFfBGSjnFB!_j!;Bk zB8|3PidRlw8=3EPt*QD8p+RG&Cp`)0uT-o`R938fzp;7etloV=X+>Pcluzkjr#9cy%dsi$r4^mV z!q{Lo-?_^9Ons?iapDy*Hu|FMc9Vqu%ytF&)Lb@p!baFO_4CuyLX2A3kT@xm38keU zI|}LTtIqcc%WH-=8Gk>OO@ z#n;*nHAswE^#=;6&Nm`i6j^2>qLamz3RoMt9XaGGC3>q z3^!EOO?NL>q3i{Qe#i3l_2#U(VwSVBwcEE09y zQ@^Ei7F~eb0QQG7v)Y}NY;_jy$4mMrAC$>ld$KrNw{V*8auJ*!*P4juK_}snnGqhM zY?ue;y#{R>%Z}E1e4TCymtQ=mt7%zM^Sjnh82SfBHk*Y1GZT8q?TjnT31p?q-;s-~ zxfX5BR{0;ydjYD$}$t< z<{c6(Bn`ocDJ=@E_LgH4{5X3;lj4Kv&kqcJEtHK8DJa`mfJ#UtJB`Y{rNU@NC@p&Y zU-a{DbALfaJg5)NnsCkxmznzgg4X(+1c&>5TxZhF0b7d?m^31G%X=c61!?H5& zvu>9G2UdLG%|)MjbS7U)yWeJs3E1iawxQOn5?7MQIp#}F&MNgJF^dcZg5~hK_W0qq z385QR*yf&h`a46jN=o0PX?$K;;Kv0=^c9odiD%EV^7j})%PVHPsxX!4u>lZc*-~sS zk6N;LG`dg~=eGPb50T10z>ZEz_ig)-)GsjnAWbivk{wl`iJqEVwk)C&e)6gE*_#0L zaIDz1dTFH?9Sl|7OnF87iam7GJsp!&N+s_Q(eK2*_YP{Fr#!ptw*8qk&!~5tRVs$9 zr%!FA6t}U4bg{=p#(H0o;sy!U{v_ue^*brAdo0wB=KYx4lOG&x8nIc!Psf$T#mgny z`G2#_%{5x1hiRJS_+~YQQ&kaPq(@9&OuDe(S%p;j(eELd`WY5)o3ngxL{K4Seaj60 zJ@L+vEv2aR`ns6%>RI_}#kJ0b>dMJaHdoaz@k<8ibk|!d#%7_!6Dftl|FaTjM6mMp zo=}a!_p(bMnf`*-6B{o)2yAlO+t{gqLdvLETX|WHR!TPP(R~iVeZA{?`(TIz3w3)M zNU6qOUT$Mmj8s9wApJomC%TLYX1dZH(I_968_26~^8mzCD_5|yv*3O>i=C|;#lp+! 
zKO&l)VCm4NA`+LaISE#+2KzyqeC|)c5Nq?TAB!!l&d@yjy*vBt4msK8bsunCZj2AE$7ju%d!SMHE9Nk7E+|}oTfz)d4UJUJUzB2a znNVf^F(d7KVZq#iT;D(WiP^3sSuP{jGMvElDQHEFR(`*oq$ViY;C;Ea1}vBd7P=+( ze2ptt6jVQOiq}tzuMaF;QITSuNOitfI17{IYHLuGR#(JW*-Ih|HB1G@Y?NXsqK-0r zc5o)n5^`B+EI_Ru>@v#YGbjFR#|JB9+Fq(rs_DkzS`FT`JH*N-eMn)h7}96vx)?Mn)+@(-miKjsr%2eVYR=H$!II+k{d zK7aiD_LD_hz^N^SiVfxEPvqx?Se3TG`r;m9souv`pw&GtTXh;er_HTFI3nE1sKnEk zcC`rQf5o}{o;b#Fq)@u&q8&#^B3ij1*4LVB7sxf; zpd=7b%I^=#sKHVbsOzukLq4HYY^cBwd<(Qww71SzmlRu4x(e611afuV$jQ|tebJ!G z=^0P+?U<1>IT}A2A9hXd{s`b0%@ZHR<0d03oW3BeXwIv}d;?EySwm$3f|Y)Z9+R+T0%7 z{mTEpicZ$`nnvml=N_(m$;|#vMz8*VY~uvFJ>Vn`gtUQ%U6oJEmBq8$--tUwlY@lK zI_KsKWJ1-){hLBct#!s|N9(Ncc-%=@EmGgcu7I;k;x7X%rV#s%V`0BU!2I0?<( znratT;d4JHXWNm!qh8+?H+4nD(cG_ck5;Uhik+G%JnL+W5O1BcJHd>%i_VFfpaSnt z9~V<}Bg?lI-3i~h^UgSADdkDO#C2Lb@Nd`!n?4X0YjR6ed9o>Q&xm{?4n#T16b^0= zKT5>h`5Q8Ic=HdwygME0q>y;$6A@?x-C<_fup8DJ{vB zzwG(qR1j5kPz?eZQ6k|!M9#zPPm!l&x%c|49iC#mLI#R4(zC3aNH56qu6|pw?^;lBdJCQOr z{p=+AZ@UMb_p5u+mV&m*A9O_nJ!lBs`>M(6L1Vo~TvAp(u8ac%4tU`5nV>Fs=JG&3 z08fqY{-Yxu5^lr$pp$_|UBAjKjm zN!BDOE;(3mutZWUYf6GdEjmTh>_t%AQqP59vu3CEO@mXr)4EyOGNPrWj9(1naSR^2 zef!0am-2rz602{Omf)$PRk5~iYd7MUl|LuU#DGu6R#sM{HC`P7<}!B8fNJBVq=w+%K73Me&<734gPI32j(!oXWxSO#3f3)6<&CA3n3S@ z(@fa8?beq)^5rW4H&&B4g~Yz++xMvpoEMi%DsW>weT3K}s}*2-8-GqnC_oWkK^i~$ zWAOKmsnf`^6Ry5K_<5z(OsFC_5UdEX>Gf#V28ju$$9jtPQ7j@(ldzlSGo29@%@0n> z+hV@w3Z~VJ67Hq}^YezQS+zsZ>2fcaF?wgxN)(Y^=`V|Fe zW_A1V;pT5qCds8^uRM-#_ITcT&W4TOyCCS;9)Ys%1#|pJ2#DNV`E?05JGGZ`V(KO4QcNdwk5qL={p{=zf zx(usm%*6HNn59$ zvJ9Ky&C3IhW?4>u7kGo*(-7RrP=vy zL1zlt@-0o;ER=9#Vk4@(Ro}O`))BRI6!*hsQ~%@qCWX4rk#A#J{<3;kw6xAOwbGyM ztx543{pLY<7&^9}5IX;MmScavxlVvqLE&z+1{D!o-h3838+)%lH#aAvSiko;OA5w{ z8myUtSrrQRl~{*s+8o`hFRd&stdQFx&+fqDR)UphdbQEP@0&9m$7^Aho}gu?q7Z@i zHb<-RxSH{eTpl(jyV(8@=(@35reZ_cIc!FHh(&VN^Vz zkZ?wOlDn-n5L><^3nP@$unUrYPWi#c2W6gIM|Yq=uvovq>-HtP7I`v6W_fHw7ZMwj z9Ao~~5-ly0f}i{Q4Nu*RXxM8Nf%I0>Dw@mw>KCM`rZ^^abP3v8VTsFpWudy0sdIy% zhMcXw(EByzfE3d|1BpKzl~Ho6TLGF|_S{-mBIvm!RwHMUXhzE_Bny8h)|_6&x}BgV zw+6JeiY(Ob-FdluH#gK^$dP+7E{aiTx6fcNGHAbE*>>+l8F%b_aUrPHXlpnep+rZ? zMcpC`_4V&v!qr+-N^HL0D^`4f$=c&rw0m;;I1h~<=y9JLT})r ztGX#A@qTKe$-!4kMjAXiO^jR~D{Ch0TRRE_4D>mqF&uxJ5+ z4*m4I&A6X8y-VKoB%z;_!ELVJekV}QsA`HMH^kBi^j7{fL#!#XXcN`??=v>)^9VY9 z*zG8@&FHktW=6@f*I2`oWxq;tY~?9qFzUvs9W;^qW~y&s0+ zE^Qxet|y!x`eJjcI#jn^pYox`CS3T>?cKC7Y%iPsX5+NsG7P?q_zGtVWrUpmt|dwN z=AGr?+1dine9l`wZJMZ*7g9LNLut~1cRwD{uu^TfhF?=uid}pI@4~$@GY>;$9#32T z>}C=D9!+kx!(+wmHh&4%<#6VQSe3?~8PO`IwzD?y$IXIrd~R-enU#Hv8-41K;vwy7uk&Pk4b9wvX}07Ls{t#|wAtZl|4_L1?Am4< zA1+*iT2MEo2SJ_LIf621*$~PzC!q13axUS!r!oFAX3B`~ferTdJa~4VBQR2|uAll4 zGy+$9ckj+`LO*#!{u5rOOc_htO)gAbCy)r%r7k2nnIB#`647YWU6qxUhC|W`D=)j0 zEh)7$RXOyR*3SGwYYVJZ!H^+tB`B+0`xeawf@HdUmMo)(l(iq2lU~JEnlK24xtw^_ z%iSDEe^zJ@ME*AY!h8;?#?&v84TlCvCRk80O1H^*D2#~MuDLyaRlmGJQYEQYjX`1b za+}?g?16Y!jVd-2tSo!yq0=Wjtxg!awLaaC>jpS?+$*&j>XKdv#k;Oe{`qGoPyZ>c z@xO9%jZEB9x!Ijom|6(+?6SEGx;D0^G6Wj>-p@mS0FZsDd+&YKI++fts)X4SmEjOg zFU#^C33B6Ja-W0pVeZS-^)E4XzsQwP`HGjR=uW@f&lrERu;&^24$YBK7J`?$DpMXn z`>)TVc|3$en25;3AFD6Z>S@ibV3qb?L%F09m=frBi6sUfE#L|GaE%N+`stM~Rr(d9 zt)!Kj1_T~vucIn0tFgFr{U@eKNv{HQMojmLF>46lP(;ZHs%QfqvKC|a%w3?1YfU>xvx9zpXvWN;*VuN@aS8qM`4QwZ>PFh4gd?c;fK4Ah@yy4|q24ARrvB)S*Egx1-``*;q&b~G@(`Fxfo$lx| zem_k;yquy(tI^Bwdam)vaYTCmKXG30$pwiZ;&kqed*i1NZOV;`d3smx)Pauyq? 
za||!z!$e}zZ?F>rqW)Vi9P0Hf-Ou zO`R=bYI)>}_43z#0(Y-pxATccy%A3O!$nF5|K$pH4HPd>5G?KO6&}b!{pO6bx1t>l zS!PUBS(yXr&+>V<-aLON^Tgfu3j*fu;zbFvWr^;)4F5f}_4k8YfIiK&XZNzIKB2lE z{qnBVh?8G09gTrTI7BTjJhaGAMEeI*~KyLu}cMi<2&)c1=2lsp39XZyC`fsF0Pb{7juPEzLKfHr`N@6JM@?|_2hIz||Pg0XBx<^PDIzR-isrRE%0HNm8 zM^++u0D{R8_T(N438v3^g46T@$|8yRZdGXTTn_) zvG8)JCMt(#nL=_`a{t+O`p&SJ78>UCpjHK5!7bMt%?1;v>2E>5z0*GXbU?J~iQ^N! zPZ#Y_`nf2j)v5rSh{?OkHh-@z>HG&HgGICP!DS6bUBtXKg^>j)DDfb`C6ih7>p6 zL{M_aBo4w#ftpyrN1!L4RIMu)Ga%ez^3Zlw_|+heVgarZDB+;k6doF-WS8zbIEiUd zo-%R~7Y;l3=wX<6#0On?xE2e>*tR&D#i*Wor6Jn`t-QA*SD-gVTu%* zOvDdh64-yNqN3}KBoQSo5UW()rxt57@{6&3;xxZwTLPh7{FKV8zAyFQ#DuTwpRI_6 zTC6adgcJ#*>$bdZ?Jq&U^1H}S@qRO}<}l}(sD~M15x14w5M2-%&<#WiqPY#+O7ydt z{U=s@-3(r?l__YUfJ;JpFe(;~ra%Ur>1*fLGC3u{Ob|hg%0~&0kkEtEorfr?0EX@H zhqSbitAC=eO8L5nTbjyB-D-|K-YE(eyR+i-YDU84wp(;H*OX<#iw8uRKH(}jBm^QE zKxB}J3xbNmd_E2xQqdyHB1(yvc%tD4DRu_99JMIOO^t_q792U8m!6WO&^>f0tMbJX zP?EBgvG~*hsi;m%D#coam`+KQNiKQ&R-|6?Pg7ABkGLoHWCnO*dD7D+J+9w+Y_d7m zBBqc)5u)S?4nz$}9O^R2s;FnY+d7nCxnY3~2BH1`hxVD7%^KLEhMfLct9^%gah*g`)h#3xT%i2LlU6gqr(_&>O4Hj`{dYJ2Tb%gf?5S&qpT zr$ed)8mST`NR;H5y|P$jaY&#>h=C(9EO3Rg$S8Z{vWu$>9WF?l;|A0t^Fpw*xfRMv!C>hw@Wm9Vs046!)dDTxH)~?8LGnC76NG%%$ zqfAbxi^Y0E^7U1pq+u9=SCD(2aG}8+?N}o8Kz5(+CIRP*+veQ`(`^T4)QFXr=;H zGI}Th)0BMDqRe;IUMow%&r#FFU3xHbgvPTtq9`Tv9R&PLef>N|ssVSQskO?P-g7p~ zCP68+rc(M)Q)A_{PG0t4uk``s=9Ky|tHj?!fYT&uyr%rH2Oug$86&l;xQbg%1sU$h z((YsLY{=2FbrpL6OANW^RGADzoFi2Ao-%5GAY(ZK3+XjQ*)r_%_0uA87vTg4I&Pv$ zoo6EjC|)u+L-Od-3K^M5dE#Df0?|i}8RpUlfSMeYDo)~Pn%b!ioPy+FA=Igdonyr> zddn5~@*@l?7Ly%D*}m?zrvP$*^Z7LsK`I4|IOrYw z%mma?KlxW&tQ{4jgu_m2`QKu8p+*0;IPm|AA2Tp~0zx8U5>hyV42eQxu;dh!RMa%I zqUh)u7@3$^M2itCPW;COo}7PvyA-L?q<`J@XH1rCIdbK3$mitZ=Hca2z^_n|VgV&e zl_^&tD5O%AYBg%rsn?)UlV&Yig|!7{4o_O_Q@4`G|9p`Gi!9+IAN#~@pQX$StE{ok z2AgcL%?`VG)8X{kFCnoNzFO_9xoKwAS?i6?bc^0(v$pf-24xvVl^VTf^vTf{#*Uqy z2?|4BK6K@y51!RkerYBzsY>|D@!>0@POF>sV*j)k?p}&|v)%}_ZsiD^4F!exS-wI4 z&a1bt3V0_?49+3t+y79NTY0JW^O%c+a~}T5DG&LNQM9%p;XJ@uIIA854zN}e-)`N9 z^KD&^4pNLb!qCDvSBysY87J7A0?M0fJ8nOQ(}aI$%AE_+Opl<`rO1C$>3SRP;Zm{g G0ssIW.newline{display:block}.katex .base{position:relative;white-space:nowrap;width:-webkit-min-content;width:-moz-min-content;width:min-content}.katex .base,.katex .strut{display:inline-block}.katex .textbf{font-weight:700}.katex .textit{font-style:italic}.katex .textrm{font-family:KaTeX_Main}.katex .textsf{font-family:KaTeX_SansSerif}.katex .texttt{font-family:KaTeX_Typewriter}.katex .mathnormal{font-family:KaTeX_Math;font-style:italic}.katex .mathit{font-family:KaTeX_Main;font-style:italic}.katex .mathrm{font-style:normal}.katex .mathbf{font-family:KaTeX_Main;font-weight:700}.katex .boldsymbol{font-family:KaTeX_Math;font-style:italic;font-weight:700}.katex .amsrm,.katex .mathbb,.katex .textbb{font-family:KaTeX_AMS}.katex .mathcal{font-family:KaTeX_Caligraphic}.katex .mathfrak,.katex .textfrak{font-family:KaTeX_Fraktur}.katex .mathtt{font-family:KaTeX_Typewriter}.katex .mathscr,.katex .textscr{font-family:KaTeX_Script}.katex .mathsf,.katex .textsf{font-family:KaTeX_SansSerif}.katex .mathboldsf,.katex .textboldsf{font-family:KaTeX_SansSerif;font-weight:700}.katex .mathitsf,.katex .textitsf{font-family:KaTeX_SansSerif;font-style:italic}.katex .mainrm{font-family:KaTeX_Main;font-style:normal}.katex .vlist-t{border-collapse:collapse;display:inline-table;table-layout:fixed}.katex .vlist-r{display:table-row}.katex .vlist{display:table-cell;position:relative;vertical-align:bottom}.katex .vlist>span{display:block;height:0;position:relative}.katex .vlist>span>span{display:inline-block}.katex 
.vlist>span>.pstrut{overflow:hidden;width:0}.katex .vlist-t2{margin-right:-2px}.katex .vlist-s{display:table-cell;font-size:1px;min-width:2px;vertical-align:bottom;width:2px}.katex .vbox{align-items:baseline;display:inline-flex;flex-direction:column}.katex .hbox{width:100%}.katex .hbox,.katex .thinbox{display:inline-flex;flex-direction:row}.katex .thinbox{max-width:0;width:0}.katex .msupsub{text-align:left}.katex .mfrac>span>span{text-align:center}.katex .mfrac .frac-line{border-bottom-style:solid;display:inline-block;width:100%}.katex .hdashline,.katex .hline,.katex .mfrac .frac-line,.katex .overline .overline-line,.katex .rule,.katex .underline .underline-line{min-height:1px}.katex .mspace{display:inline-block}.katex .clap,.katex .llap,.katex .rlap{position:relative;width:0}.katex .clap>.inner,.katex .llap>.inner,.katex .rlap>.inner{position:absolute}.katex .clap>.fix,.katex .llap>.fix,.katex .rlap>.fix{display:inline-block}.katex .llap>.inner{right:0}.katex .clap>.inner,.katex .rlap>.inner{left:0}.katex .clap>.inner>span{margin-left:-50%;margin-right:50%}.katex .rule{border:0 solid;display:inline-block;position:relative}.katex .hline,.katex .overline .overline-line,.katex .underline .underline-line{border-bottom-style:solid;display:inline-block;width:100%}.katex .hdashline{border-bottom-style:dashed;display:inline-block;width:100%}.katex .sqrt>.root{margin-left:.27777778em;margin-right:-.55555556em}.katex .fontsize-ensurer.reset-size1.size1,.katex .sizing.reset-size1.size1{font-size:1em}.katex .fontsize-ensurer.reset-size1.size2,.katex .sizing.reset-size1.size2{font-size:1.2em}.katex .fontsize-ensurer.reset-size1.size3,.katex .sizing.reset-size1.size3{font-size:1.4em}.katex .fontsize-ensurer.reset-size1.size4,.katex .sizing.reset-size1.size4{font-size:1.6em}.katex .fontsize-ensurer.reset-size1.size5,.katex .sizing.reset-size1.size5{font-size:1.8em}.katex .fontsize-ensurer.reset-size1.size6,.katex .sizing.reset-size1.size6{font-size:2em}.katex .fontsize-ensurer.reset-size1.size7,.katex .sizing.reset-size1.size7{font-size:2.4em}.katex .fontsize-ensurer.reset-size1.size8,.katex .sizing.reset-size1.size8{font-size:2.88em}.katex .fontsize-ensurer.reset-size1.size9,.katex .sizing.reset-size1.size9{font-size:3.456em}.katex .fontsize-ensurer.reset-size1.size10,.katex .sizing.reset-size1.size10{font-size:4.148em}.katex .fontsize-ensurer.reset-size1.size11,.katex .sizing.reset-size1.size11{font-size:4.976em}.katex .fontsize-ensurer.reset-size2.size1,.katex .sizing.reset-size2.size1{font-size:.83333333em}.katex .fontsize-ensurer.reset-size2.size2,.katex .sizing.reset-size2.size2{font-size:1em}.katex .fontsize-ensurer.reset-size2.size3,.katex .sizing.reset-size2.size3{font-size:1.16666667em}.katex .fontsize-ensurer.reset-size2.size4,.katex .sizing.reset-size2.size4{font-size:1.33333333em}.katex .fontsize-ensurer.reset-size2.size5,.katex .sizing.reset-size2.size5{font-size:1.5em}.katex .fontsize-ensurer.reset-size2.size6,.katex .sizing.reset-size2.size6{font-size:1.66666667em}.katex .fontsize-ensurer.reset-size2.size7,.katex .sizing.reset-size2.size7{font-size:2em}.katex .fontsize-ensurer.reset-size2.size8,.katex .sizing.reset-size2.size8{font-size:2.4em}.katex .fontsize-ensurer.reset-size2.size9,.katex .sizing.reset-size2.size9{font-size:2.88em}.katex .fontsize-ensurer.reset-size2.size10,.katex .sizing.reset-size2.size10{font-size:3.45666667em}.katex .fontsize-ensurer.reset-size2.size11,.katex .sizing.reset-size2.size11{font-size:4.14666667em}.katex .fontsize-ensurer.reset-size3.size1,.katex 
.sizing.reset-size3.size1{font-size:.71428571em}.katex .fontsize-ensurer.reset-size3.size2,.katex .sizing.reset-size3.size2{font-size:.85714286em}.katex .fontsize-ensurer.reset-size3.size3,.katex .sizing.reset-size3.size3{font-size:1em}.katex .fontsize-ensurer.reset-size3.size4,.katex .sizing.reset-size3.size4{font-size:1.14285714em}.katex .fontsize-ensurer.reset-size3.size5,.katex .sizing.reset-size3.size5{font-size:1.28571429em}.katex .fontsize-ensurer.reset-size3.size6,.katex .sizing.reset-size3.size6{font-size:1.42857143em}.katex .fontsize-ensurer.reset-size3.size7,.katex .sizing.reset-size3.size7{font-size:1.71428571em}.katex .fontsize-ensurer.reset-size3.size8,.katex .sizing.reset-size3.size8{font-size:2.05714286em}.katex .fontsize-ensurer.reset-size3.size9,.katex .sizing.reset-size3.size9{font-size:2.46857143em}.katex .fontsize-ensurer.reset-size3.size10,.katex .sizing.reset-size3.size10{font-size:2.96285714em}.katex .fontsize-ensurer.reset-size3.size11,.katex .sizing.reset-size3.size11{font-size:3.55428571em}.katex .fontsize-ensurer.reset-size4.size1,.katex .sizing.reset-size4.size1{font-size:.625em}.katex .fontsize-ensurer.reset-size4.size2,.katex .sizing.reset-size4.size2{font-size:.75em}.katex .fontsize-ensurer.reset-size4.size3,.katex .sizing.reset-size4.size3{font-size:.875em}.katex .fontsize-ensurer.reset-size4.size4,.katex .sizing.reset-size4.size4{font-size:1em}.katex .fontsize-ensurer.reset-size4.size5,.katex .sizing.reset-size4.size5{font-size:1.125em}.katex .fontsize-ensurer.reset-size4.size6,.katex .sizing.reset-size4.size6{font-size:1.25em}.katex .fontsize-ensurer.reset-size4.size7,.katex .sizing.reset-size4.size7{font-size:1.5em}.katex .fontsize-ensurer.reset-size4.size8,.katex .sizing.reset-size4.size8{font-size:1.8em}.katex .fontsize-ensurer.reset-size4.size9,.katex .sizing.reset-size4.size9{font-size:2.16em}.katex .fontsize-ensurer.reset-size4.size10,.katex .sizing.reset-size4.size10{font-size:2.5925em}.katex .fontsize-ensurer.reset-size4.size11,.katex .sizing.reset-size4.size11{font-size:3.11em}.katex .fontsize-ensurer.reset-size5.size1,.katex .sizing.reset-size5.size1{font-size:.55555556em}.katex .fontsize-ensurer.reset-size5.size2,.katex .sizing.reset-size5.size2{font-size:.66666667em}.katex .fontsize-ensurer.reset-size5.size3,.katex .sizing.reset-size5.size3{font-size:.77777778em}.katex .fontsize-ensurer.reset-size5.size4,.katex .sizing.reset-size5.size4{font-size:.88888889em}.katex .fontsize-ensurer.reset-size5.size5,.katex .sizing.reset-size5.size5{font-size:1em}.katex .fontsize-ensurer.reset-size5.size6,.katex .sizing.reset-size5.size6{font-size:1.11111111em}.katex .fontsize-ensurer.reset-size5.size7,.katex .sizing.reset-size5.size7{font-size:1.33333333em}.katex .fontsize-ensurer.reset-size5.size8,.katex .sizing.reset-size5.size8{font-size:1.6em}.katex .fontsize-ensurer.reset-size5.size9,.katex .sizing.reset-size5.size9{font-size:1.92em}.katex .fontsize-ensurer.reset-size5.size10,.katex .sizing.reset-size5.size10{font-size:2.30444444em}.katex .fontsize-ensurer.reset-size5.size11,.katex .sizing.reset-size5.size11{font-size:2.76444444em}.katex .fontsize-ensurer.reset-size6.size1,.katex .sizing.reset-size6.size1{font-size:.5em}.katex .fontsize-ensurer.reset-size6.size2,.katex .sizing.reset-size6.size2{font-size:.6em}.katex .fontsize-ensurer.reset-size6.size3,.katex .sizing.reset-size6.size3{font-size:.7em}.katex .fontsize-ensurer.reset-size6.size4,.katex .sizing.reset-size6.size4{font-size:.8em}.katex .fontsize-ensurer.reset-size6.size5,.katex 
.sizing.reset-size6.size5{font-size:.9em}.katex .fontsize-ensurer.reset-size6.size6,.katex .sizing.reset-size6.size6{font-size:1em}.katex .fontsize-ensurer.reset-size6.size7,.katex .sizing.reset-size6.size7{font-size:1.2em}.katex .fontsize-ensurer.reset-size6.size8,.katex .sizing.reset-size6.size8{font-size:1.44em}.katex .fontsize-ensurer.reset-size6.size9,.katex .sizing.reset-size6.size9{font-size:1.728em}.katex .fontsize-ensurer.reset-size6.size10,.katex .sizing.reset-size6.size10{font-size:2.074em}.katex .fontsize-ensurer.reset-size6.size11,.katex .sizing.reset-size6.size11{font-size:2.488em}.katex .fontsize-ensurer.reset-size7.size1,.katex .sizing.reset-size7.size1{font-size:.41666667em}.katex .fontsize-ensurer.reset-size7.size2,.katex .sizing.reset-size7.size2{font-size:.5em}.katex .fontsize-ensurer.reset-size7.size3,.katex .sizing.reset-size7.size3{font-size:.58333333em}.katex .fontsize-ensurer.reset-size7.size4,.katex .sizing.reset-size7.size4{font-size:.66666667em}.katex .fontsize-ensurer.reset-size7.size5,.katex .sizing.reset-size7.size5{font-size:.75em}.katex .fontsize-ensurer.reset-size7.size6,.katex .sizing.reset-size7.size6{font-size:.83333333em}.katex .fontsize-ensurer.reset-size7.size7,.katex .sizing.reset-size7.size7{font-size:1em}.katex .fontsize-ensurer.reset-size7.size8,.katex .sizing.reset-size7.size8{font-size:1.2em}.katex .fontsize-ensurer.reset-size7.size9,.katex .sizing.reset-size7.size9{font-size:1.44em}.katex .fontsize-ensurer.reset-size7.size10,.katex .sizing.reset-size7.size10{font-size:1.72833333em}.katex .fontsize-ensurer.reset-size7.size11,.katex .sizing.reset-size7.size11{font-size:2.07333333em}.katex .fontsize-ensurer.reset-size8.size1,.katex .sizing.reset-size8.size1{font-size:.34722222em}.katex .fontsize-ensurer.reset-size8.size2,.katex .sizing.reset-size8.size2{font-size:.41666667em}.katex .fontsize-ensurer.reset-size8.size3,.katex .sizing.reset-size8.size3{font-size:.48611111em}.katex .fontsize-ensurer.reset-size8.size4,.katex .sizing.reset-size8.size4{font-size:.55555556em}.katex .fontsize-ensurer.reset-size8.size5,.katex .sizing.reset-size8.size5{font-size:.625em}.katex .fontsize-ensurer.reset-size8.size6,.katex .sizing.reset-size8.size6{font-size:.69444444em}.katex .fontsize-ensurer.reset-size8.size7,.katex .sizing.reset-size8.size7{font-size:.83333333em}.katex .fontsize-ensurer.reset-size8.size8,.katex .sizing.reset-size8.size8{font-size:1em}.katex .fontsize-ensurer.reset-size8.size9,.katex .sizing.reset-size8.size9{font-size:1.2em}.katex .fontsize-ensurer.reset-size8.size10,.katex .sizing.reset-size8.size10{font-size:1.44027778em}.katex .fontsize-ensurer.reset-size8.size11,.katex .sizing.reset-size8.size11{font-size:1.72777778em}.katex .fontsize-ensurer.reset-size9.size1,.katex .sizing.reset-size9.size1{font-size:.28935185em}.katex .fontsize-ensurer.reset-size9.size2,.katex .sizing.reset-size9.size2{font-size:.34722222em}.katex .fontsize-ensurer.reset-size9.size3,.katex .sizing.reset-size9.size3{font-size:.40509259em}.katex .fontsize-ensurer.reset-size9.size4,.katex .sizing.reset-size9.size4{font-size:.46296296em}.katex .fontsize-ensurer.reset-size9.size5,.katex .sizing.reset-size9.size5{font-size:.52083333em}.katex .fontsize-ensurer.reset-size9.size6,.katex .sizing.reset-size9.size6{font-size:.5787037em}.katex .fontsize-ensurer.reset-size9.size7,.katex .sizing.reset-size9.size7{font-size:.69444444em}.katex .fontsize-ensurer.reset-size9.size8,.katex .sizing.reset-size9.size8{font-size:.83333333em}.katex .fontsize-ensurer.reset-size9.size9,.katex 
.sizing.reset-size9.size9{font-size:1em}.katex .fontsize-ensurer.reset-size9.size10,.katex .sizing.reset-size9.size10{font-size:1.20023148em}.katex .fontsize-ensurer.reset-size9.size11,.katex .sizing.reset-size9.size11{font-size:1.43981481em}.katex .fontsize-ensurer.reset-size10.size1,.katex .sizing.reset-size10.size1{font-size:.24108004em}.katex .fontsize-ensurer.reset-size10.size2,.katex .sizing.reset-size10.size2{font-size:.28929605em}.katex .fontsize-ensurer.reset-size10.size3,.katex .sizing.reset-size10.size3{font-size:.33751205em}.katex .fontsize-ensurer.reset-size10.size4,.katex .sizing.reset-size10.size4{font-size:.38572806em}.katex .fontsize-ensurer.reset-size10.size5,.katex .sizing.reset-size10.size5{font-size:.43394407em}.katex .fontsize-ensurer.reset-size10.size6,.katex .sizing.reset-size10.size6{font-size:.48216008em}.katex .fontsize-ensurer.reset-size10.size7,.katex .sizing.reset-size10.size7{font-size:.57859209em}.katex .fontsize-ensurer.reset-size10.size8,.katex .sizing.reset-size10.size8{font-size:.69431051em}.katex .fontsize-ensurer.reset-size10.size9,.katex .sizing.reset-size10.size9{font-size:.83317261em}.katex .fontsize-ensurer.reset-size10.size10,.katex .sizing.reset-size10.size10{font-size:1em}.katex .fontsize-ensurer.reset-size10.size11,.katex .sizing.reset-size10.size11{font-size:1.19961427em}.katex .fontsize-ensurer.reset-size11.size1,.katex .sizing.reset-size11.size1{font-size:.20096463em}.katex .fontsize-ensurer.reset-size11.size2,.katex .sizing.reset-size11.size2{font-size:.24115756em}.katex .fontsize-ensurer.reset-size11.size3,.katex .sizing.reset-size11.size3{font-size:.28135048em}.katex .fontsize-ensurer.reset-size11.size4,.katex .sizing.reset-size11.size4{font-size:.32154341em}.katex .fontsize-ensurer.reset-size11.size5,.katex .sizing.reset-size11.size5{font-size:.36173633em}.katex .fontsize-ensurer.reset-size11.size6,.katex .sizing.reset-size11.size6{font-size:.40192926em}.katex .fontsize-ensurer.reset-size11.size7,.katex .sizing.reset-size11.size7{font-size:.48231511em}.katex .fontsize-ensurer.reset-size11.size8,.katex .sizing.reset-size11.size8{font-size:.57877814em}.katex .fontsize-ensurer.reset-size11.size9,.katex .sizing.reset-size11.size9{font-size:.69453376em}.katex .fontsize-ensurer.reset-size11.size10,.katex .sizing.reset-size11.size10{font-size:.83360129em}.katex .fontsize-ensurer.reset-size11.size11,.katex .sizing.reset-size11.size11{font-size:1em}.katex .delimsizing.size1{font-family:KaTeX_Size1}.katex .delimsizing.size2{font-family:KaTeX_Size2}.katex .delimsizing.size3{font-family:KaTeX_Size3}.katex .delimsizing.size4{font-family:KaTeX_Size4}.katex .delimsizing.mult .delim-size1>span{font-family:KaTeX_Size1}.katex .delimsizing.mult .delim-size4>span{font-family:KaTeX_Size4}.katex .nulldelimiter{display:inline-block;width:.12em}.katex .delimcenter,.katex .op-symbol{position:relative}.katex .op-symbol.small-op{font-family:KaTeX_Size1}.katex .op-symbol.large-op{font-family:KaTeX_Size2}.katex .accent>.vlist-t,.katex .op-limits>.vlist-t{text-align:center}.katex .accent .accent-body{position:relative}.katex .accent .accent-body:not(.accent-full){width:0}.katex .overlay{display:block}.katex .mtable .vertical-separator{display:inline-block;min-width:1px}.katex .mtable .arraycolsep{display:inline-block}.katex .mtable .col-align-c>.vlist-t{text-align:center}.katex .mtable .col-align-l>.vlist-t{text-align:left}.katex .mtable .col-align-r>.vlist-t{text-align:right}.katex .svg-align{text-align:left}.katex 
svg{fill:currentColor;stroke:currentColor;fill-rule:nonzero;fill-opacity:1;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;display:block;height:inherit;position:absolute;width:100%}.katex svg path{stroke:none}.katex img{border-style:none;max-height:none;max-width:none;min-height:0;min-width:0}.katex .stretchy{display:block;overflow:hidden;position:relative;width:100%}.katex .stretchy:after,.katex .stretchy:before{content:""}.katex .hide-tail{overflow:hidden;position:relative;width:100%}.katex .halfarrow-left{left:0;overflow:hidden;position:absolute;width:50.2%}.katex .halfarrow-right{overflow:hidden;position:absolute;right:0;width:50.2%}.katex .brace-left{left:0;overflow:hidden;position:absolute;width:25.1%}.katex .brace-center{left:25%;overflow:hidden;position:absolute;width:50%}.katex .brace-right{overflow:hidden;position:absolute;right:0;width:25.1%}.katex .x-arrow-pad{padding:0 .5em}.katex .cd-arrow-pad{padding:0 .55556em 0 .27778em}.katex .mover,.katex .munder,.katex .x-arrow{text-align:center}.katex .boxpad{padding:0 .3em}.katex .fbox,.katex .fcolorbox{border:.04em solid;box-sizing:border-box}.katex .cancel-pad{padding:0 .2em}.katex .cancel-lap{margin-left:-.2em;margin-right:-.2em}.katex .sout{border-bottom-style:solid;border-bottom-width:.08em}.katex .angl{border-right:.049em solid;border-top:.049em solid;box-sizing:border-box;margin-right:.03889em}.katex .anglpad{padding:0 .03889em}.katex .eqn-num:before{content:"(" counter(katexEqnNo) ")";counter-increment:katexEqnNo}.katex .mml-eqn-num:before{content:"(" counter(mmlEqnNo) ")";counter-increment:mmlEqnNo}.katex .mtr-glue{width:50%}.katex .cd-vert-arrow{display:inline-block;position:relative}.katex .cd-label-left{display:inline-block;position:absolute;right:calc(50% + .3em);text-align:left}.katex .cd-label-right{display:inline-block;left:calc(50% + .3em);position:absolute;text-align:right}.katex-display{display:block;margin:1em 0;text-align:center}.katex-display>.katex{display:block;text-align:center;white-space:nowrap}.katex-display>.katex>.katex-html{display:block;position:relative}.katex-display>.katex>.katex-html>.tag{position:absolute;right:0}.katex-display.leqno>.katex>.katex-html>.tag{left:0;right:auto}.katex-display.fleqn>.katex{padding-left:2em;text-align:left}body{counter-reset:katexEqnNo mmlEqnNo} diff --git a/katex.min.js b/katex.min.js deleted file mode 100644 index e4d78f243..000000000 --- a/katex.min.js +++ /dev/null @@ -1 +0,0 @@ -!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.katex=t():e.katex=t()}("undefined"!=typeof self?self:this,(function(){return function(){"use strict";var e={d:function(t,r){for(var n in r)e.o(r,n)&&!e.o(t,n)&&Object.defineProperty(t,n,{enumerable:!0,get:r[n]})},o:function(e,t){return Object.prototype.hasOwnProperty.call(e,t)}},t={};e.d(t,{default:function(){return Zn}});var r=function e(t,r){this.position=void 0;var n,a="KaTeX parse error: "+t,i=r&&r.loc;if(i&&i.start<=i.end){var o=i.lexer.input;n=i.start;var s=i.end;n===o.length?a+=" at end of input: ":a+=" at position "+(n+1)+": ";var l=o.slice(n,s).replace(/[^]/g,"$&\u0332");a+=(n>15?"\u2026"+o.slice(n-15,n):o.slice(0,n))+l+(s+15":">","<":"<",'"':""","'":"'"},o=/[&><"']/g;var s=function 
e(t){return"ordgroup"===t.type||"color"===t.type?1===t.body.length?e(t.body[0]):t:"font"===t.type?e(t.body):t},l={contains:function(e,t){return-1!==e.indexOf(t)},deflt:function(e,t){return void 0===e?t:e},escape:function(e){return String(e).replace(o,(function(e){return i[e]}))},hyphenate:function(e){return e.replace(a,"-$1").toLowerCase()},getBaseElem:s,isCharacterBox:function(e){var t=s(e);return"mathord"===t.type||"textord"===t.type||"atom"===t.type},protocolFromUrl:function(e){var t=/^\s*([^\\/#]*?)(?::|�*58|�*3a)/i.exec(e);return null!=t?t[1]:"_relative"}},h={displayMode:{type:"boolean",description:"Render math in display mode, which puts the math in display style (so \\int and \\sum are large, for example), and centers the math on the page on its own line.",cli:"-d, --display-mode"},output:{type:{enum:["htmlAndMathml","html","mathml"]},description:"Determines the markup language of the output.",cli:"-F, --format "},leqno:{type:"boolean",description:"Render display math in leqno style (left-justified tags)."},fleqn:{type:"boolean",description:"Render display math flush left."},throwOnError:{type:"boolean",default:!0,cli:"-t, --no-throw-on-error",cliDescription:"Render errors (in the color given by --error-color) instead of throwing a ParseError exception when encountering an error."},errorColor:{type:"string",default:"#cc0000",cli:"-c, --error-color ",cliDescription:"A color string given in the format 'rgb' or 'rrggbb' (no #). This option determines the color of errors rendered by the -t option.",cliProcessor:function(e){return"#"+e}},macros:{type:"object",cli:"-m, --macro ",cliDescription:"Define custom macro of the form '\\foo:expansion' (use multiple -m arguments for multiple macros).",cliDefault:[],cliProcessor:function(e,t){return t.push(e),t}},minRuleThickness:{type:"number",description:"Specifies a minimum thickness, in ems, for fraction lines, `\\sqrt` top lines, `{array}` vertical lines, `\\hline`, `\\hdashline`, `\\underline`, `\\overline`, and the borders of `\\fbox`, `\\boxed`, and `\\fcolorbox`.",processor:function(e){return Math.max(0,e)},cli:"--min-rule-thickness ",cliProcessor:parseFloat},colorIsTextColor:{type:"boolean",description:"Makes \\color behave like LaTeX's 2-argument \\textcolor, instead of LaTeX's one-argument \\color mode change.",cli:"-b, --color-is-text-color"},strict:{type:[{enum:["warn","ignore","error"]},"boolean","function"],description:"Turn on strict / LaTeX faithfulness mode, which throws an error if the input uses features that are not supported by LaTeX.",cli:"-S, --strict",cliDefault:!1},trust:{type:["boolean","function"],description:"Trust the input, enabling all HTML features such as \\url.",cli:"-T, --trust"},maxSize:{type:"number",default:1/0,description:"If non-zero, all user-specified sizes, e.g. in \\rule{500em}{500em}, will be capped to maxSize ems. Otherwise, elements and spaces can be arbitrarily large",processor:function(e){return Math.max(0,e)},cli:"-s, --max-size ",cliProcessor:parseInt},maxExpand:{type:"number",default:1e3,description:"Limit the number of macro expansions to the specified number, to prevent e.g. infinite macro loops. 
If set to Infinity, the macro expander will try to fully expand as in LaTeX.",processor:function(e){return Math.max(0,e)},cli:"-e, --max-expand ",cliProcessor:function(e){return"Infinity"===e?1/0:parseInt(e)}},globalGroup:{type:"boolean",cli:!1}};function m(e){if(e.default)return e.default;var t=e.type,r=Array.isArray(t)?t[0]:t;if("string"!=typeof r)return r.enum[0];switch(r){case"boolean":return!1;case"string":return"";case"number":return 0;case"object":return{}}}var c=function(){function e(e){for(var t in this.displayMode=void 0,this.output=void 0,this.leqno=void 0,this.fleqn=void 0,this.throwOnError=void 0,this.errorColor=void 0,this.macros=void 0,this.minRuleThickness=void 0,this.colorIsTextColor=void 0,this.strict=void 0,this.trust=void 0,this.maxSize=void 0,this.maxExpand=void 0,this.globalGroup=void 0,e=e||{},h)if(h.hasOwnProperty(t)){var r=h[t];this[t]=void 0!==e[t]?r.processor?r.processor(e[t]):e[t]:m(r)}}var t=e.prototype;return t.reportNonstrict=function(e,t,r){var a=this.strict;if("function"==typeof a&&(a=a(e,t,r)),a&&"ignore"!==a){if(!0===a||"error"===a)throw new n("LaTeX-incompatible input and strict mode is set to 'error': "+t+" ["+e+"]",r);"warn"===a?"undefined"!=typeof console&&console.warn("LaTeX-incompatible input and strict mode is set to 'warn': "+t+" ["+e+"]"):"undefined"!=typeof console&&console.warn("LaTeX-incompatible input and strict mode is set to unrecognized '"+a+"': "+t+" ["+e+"]")}},t.useStrictBehavior=function(e,t,r){var n=this.strict;if("function"==typeof n)try{n=n(e,t,r)}catch(e){n="error"}return!(!n||"ignore"===n)&&(!0===n||"error"===n||("warn"===n?("undefined"!=typeof console&&console.warn("LaTeX-incompatible input and strict mode is set to 'warn': "+t+" ["+e+"]"),!1):("undefined"!=typeof console&&console.warn("LaTeX-incompatible input and strict mode is set to unrecognized '"+n+"': "+t+" ["+e+"]"),!1)))},t.isTrusted=function(e){e.url&&!e.protocol&&(e.protocol=l.protocolFromUrl(e.url));var t="function"==typeof this.trust?this.trust(e):this.trust;return Boolean(t)},e}(),u=function(){function e(e,t,r){this.id=void 0,this.size=void 0,this.cramped=void 0,this.id=e,this.size=t,this.cramped=r}var t=e.prototype;return t.sup=function(){return p[d[this.id]]},t.sub=function(){return p[f[this.id]]},t.fracNum=function(){return p[g[this.id]]},t.fracDen=function(){return p[v[this.id]]},t.cramp=function(){return p[b[this.id]]},t.text=function(){return p[y[this.id]]},t.isTight=function(){return this.size>=2},e}(),p=[new u(0,0,!1),new u(1,0,!0),new u(2,1,!1),new u(3,1,!0),new u(4,2,!1),new u(5,2,!0),new u(6,3,!1),new u(7,3,!0)],d=[4,5,4,5,6,7,6,7],f=[5,5,5,5,7,7,7,7],g=[2,3,4,5,6,7,6,7],v=[3,3,5,5,7,7,7,7],b=[1,1,3,3,5,5,7,7],y=[0,1,2,3,2,3,2,3],x={DISPLAY:p[0],TEXT:p[2],SCRIPT:p[4],SCRIPTSCRIPT:p[6]},w=[{name:"latin",blocks:[[256,591],[768,879]]},{name:"cyrillic",blocks:[[1024,1279]]},{name:"armenian",blocks:[[1328,1423]]},{name:"brahmic",blocks:[[2304,4255]]},{name:"georgian",blocks:[[4256,4351]]},{name:"cjk",blocks:[[12288,12543],[19968,40879],[65280,65376]]},{name:"hangul",blocks:[[44032,55215]]}];var k=[];function S(e){for(var t=0;t=k[t]&&e<=k[t+1])return!0;return!1}w.forEach((function(e){return e.blocks.forEach((function(e){return k.push.apply(k,e)}))}));var M=80,z={doubleleftarrow:"M262 157\nl10-10c34-36 62.7-77 86-123 3.3-8 5-13.3 5-16 0-5.3-6.7-8-20-8-7.3\n 0-12.2.5-14.5 1.5-2.3 1-4.8 4.5-7.5 10.5-49.3 97.3-121.7 169.3-217 216-28\n 14-57.3 25-88 33-6.7 2-11 3.8-13 5.5-2 1.7-3 4.2-3 7.5s1 5.8 3 7.5\nc2 1.7 6.3 3.5 13 5.5 68 17.3 128.2 47.8 180.5 91.5 52.3 43.7 
93.8 96.2 124.5\n 157.5 9.3 8 15.3 12.3 18 13h6c12-.7 18-4 18-10 0-2-1.7-7-5-15-23.3-46-52-87\n-86-123l-10-10h399738v-40H218c328 0 0 0 0 0l-10-8c-26.7-20-65.7-43-117-69 2.7\n-2 6-3.7 10-5 36.7-16 72.3-37.3 107-64l10-8h399782v-40z\nm8 0v40h399730v-40zm0 194v40h399730v-40z",doublerightarrow:"M399738 392l\n-10 10c-34 36-62.7 77-86 123-3.3 8-5 13.3-5 16 0 5.3 6.7 8 20 8 7.3 0 12.2-.5\n 14.5-1.5 2.3-1 4.8-4.5 7.5-10.5 49.3-97.3 121.7-169.3 217-216 28-14 57.3-25 88\n-33 6.7-2 11-3.8 13-5.5 2-1.7 3-4.2 3-7.5s-1-5.8-3-7.5c-2-1.7-6.3-3.5-13-5.5-68\n-17.3-128.2-47.8-180.5-91.5-52.3-43.7-93.8-96.2-124.5-157.5-9.3-8-15.3-12.3-18\n-13h-6c-12 .7-18 4-18 10 0 2 1.7 7 5 15 23.3 46 52 87 86 123l10 10H0v40h399782\nc-328 0 0 0 0 0l10 8c26.7 20 65.7 43 117 69-2.7 2-6 3.7-10 5-36.7 16-72.3 37.3\n-107 64l-10 8H0v40zM0 157v40h399730v-40zm0 194v40h399730v-40z",leftarrow:"M400000 241H110l3-3c68.7-52.7 113.7-120\n 135-202 4-14.7 6-23 6-25 0-7.3-7-11-21-11-8 0-13.2.8-15.5 2.5-2.3 1.7-4.2 5.8\n-5.5 12.5-1.3 4.7-2.7 10.3-4 17-12 48.7-34.8 92-68.5 130S65.3 228.3 18 247\nc-10 4-16 7.7-18 11 0 8.7 6 14.3 18 17 47.3 18.7 87.8 47 121.5 85S196 441.3 208\n 490c.7 2 1.3 5 2 9s1.2 6.7 1.5 8c.3 1.3 1 3.3 2 6s2.2 4.5 3.5 5.5c1.3 1 3.3\n 1.8 6 2.5s6 1 10 1c14 0 21-3.7 21-11 0-2-2-10.3-6-25-20-79.3-65-146.7-135-202\n l-3-3h399890zM100 241v40h399900v-40z",leftbrace:"M6 548l-6-6v-35l6-11c56-104 135.3-181.3 238-232 57.3-28.7 117\n-45 179-50h399577v120H403c-43.3 7-81 15-113 26-100.7 33-179.7 91-237 174-2.7\n 5-6 9-10 13-.7 1-7.3 1-20 1H6z",leftbraceunder:"M0 6l6-6h17c12.688 0 19.313.3 20 1 4 4 7.313 8.3 10 13\n 35.313 51.3 80.813 93.8 136.5 127.5 55.688 33.7 117.188 55.8 184.5 66.5.688\n 0 2 .3 4 1 18.688 2.7 76 4.3 172 5h399450v120H429l-6-1c-124.688-8-235-61.7\n-331-161C60.687 138.7 32.312 99.3 7 54L0 41V6z",leftgroup:"M400000 80\nH435C64 80 168.3 229.4 21 260c-5.9 1.2-18 0-18 0-2 0-3-1-3-3v-38C76 61 257 0\n 435 0h399565z",leftgroupunder:"M400000 262\nH435C64 262 168.3 112.6 21 82c-5.9-1.2-18 0-18 0-2 0-3 1-3 3v38c76 158 257 219\n 435 219h399565z",leftharpoon:"M0 267c.7 5.3 3 10 7 14h399993v-40H93c3.3\n-3.3 10.2-9.5 20.5-18.5s17.8-15.8 22.5-20.5c50.7-52 88-110.3 112-175 4-11.3 5\n-18.3 3-21-1.3-4-7.3-6-18-6-8 0-13 .7-15 2s-4.7 6.7-8 16c-42 98.7-107.3 174.7\n-196 228-6.7 4.7-10.7 8-12 10-1.3 2-2 5.7-2 11zm100-26v40h399900v-40z",leftharpoonplus:"M0 267c.7 5.3 3 10 7 14h399993v-40H93c3.3-3.3 10.2-9.5\n 20.5-18.5s17.8-15.8 22.5-20.5c50.7-52 88-110.3 112-175 4-11.3 5-18.3 3-21-1.3\n-4-7.3-6-18-6-8 0-13 .7-15 2s-4.7 6.7-8 16c-42 98.7-107.3 174.7-196 228-6.7 4.7\n-10.7 8-12 10-1.3 2-2 5.7-2 11zm100-26v40h399900v-40zM0 435v40h400000v-40z\nm0 0v40h400000v-40z",leftharpoondown:"M7 241c-4 4-6.333 8.667-7 14 0 5.333.667 9 2 11s5.333\n 5.333 12 10c90.667 54 156 130 196 228 3.333 10.667 6.333 16.333 9 17 2 .667 5\n 1 9 1h5c10.667 0 16.667-2 18-6 2-2.667 1-9.667-3-21-32-87.333-82.667-157.667\n-152-211l-3-3h399907v-40zM93 281 H400000 v-40L7 241z",leftharpoondownplus:"M7 435c-4 4-6.3 8.7-7 14 0 5.3.7 9 2 11s5.3 5.3 12\n 10c90.7 54 156 130 196 228 3.3 10.7 6.3 16.3 9 17 2 .7 5 1 9 1h5c10.7 0 16.7\n-2 18-6 2-2.7 1-9.7-3-21-32-87.3-82.7-157.7-152-211l-3-3h399907v-40H7zm93 0\nv40h399900v-40zM0 241v40h399900v-40zm0 0v40h399900v-40z",lefthook:"M400000 281 H103s-33-11.2-61-33.5S0 197.3 0 164s14.2-61.2 42.5\n-83.5C70.8 58.2 104 47 142 47 c16.7 0 25 6.7 25 20 0 12-8.7 18.7-26 20-40 3.3\n-68.7 15.7-86 37-10 12-15 25.3-15 40 0 22.7 9.8 40.7 29.5 54 19.7 13.3 43.5 21\n 71.5 23h399859zM103 281v-40h399897v40z",leftlinesegment:"M40 281 V428 H0 V94 H40 V241 H400000 
v40z\nM40 281 V428 H0 V94 H40 V241 H400000 v40z",leftmapsto:"M40 281 V448H0V74H40V241H400000v40z\nM40 281 V448H0V74H40V241H400000v40z",leftToFrom:"M0 147h400000v40H0zm0 214c68 40 115.7 95.7 143 167h22c15.3 0 23\n-.3 23-1 0-1.3-5.3-13.7-16-37-18-35.3-41.3-69-70-101l-7-8h399905v-40H95l7-8\nc28.7-32 52-65.7 70-101 10.7-23.3 16-35.7 16-37 0-.7-7.7-1-23-1h-22C115.7 265.3\n 68 321 0 361zm0-174v-40h399900v40zm100 154v40h399900v-40z",longequal:"M0 50 h400000 v40H0z m0 194h40000v40H0z\nM0 50 h400000 v40H0z m0 194h40000v40H0z",midbrace:"M200428 334\nc-100.7-8.3-195.3-44-280-108-55.3-42-101.7-93-139-153l-9-14c-2.7 4-5.7 8.7-9 14\n-53.3 86.7-123.7 153-211 199-66.7 36-137.3 56.3-212 62H0V214h199568c178.3-11.7\n 311.7-78.3 403-201 6-8 9.7-12 11-12 .7-.7 6.7-1 18-1s17.3.3 18 1c1.3 0 5 4 11\n 12 44.7 59.3 101.3 106.3 170 141s145.3 54.3 229 60h199572v120z",midbraceunder:"M199572 214\nc100.7 8.3 195.3 44 280 108 55.3 42 101.7 93 139 153l9 14c2.7-4 5.7-8.7 9-14\n 53.3-86.7 123.7-153 211-199 66.7-36 137.3-56.3 212-62h199568v120H200432c-178.3\n 11.7-311.7 78.3-403 201-6 8-9.7 12-11 12-.7.7-6.7 1-18 1s-17.3-.3-18-1c-1.3 0\n-5-4-11-12-44.7-59.3-101.3-106.3-170-141s-145.3-54.3-229-60H0V214z",oiintSize1:"M512.6 71.6c272.6 0 320.3 106.8 320.3 178.2 0 70.8-47.7 177.6\n-320.3 177.6S193.1 320.6 193.1 249.8c0-71.4 46.9-178.2 319.5-178.2z\nm368.1 178.2c0-86.4-60.9-215.4-368.1-215.4-306.4 0-367.3 129-367.3 215.4 0 85.8\n60.9 214.8 367.3 214.8 307.2 0 368.1-129 368.1-214.8z",oiintSize2:"M757.8 100.1c384.7 0 451.1 137.6 451.1 230 0 91.3-66.4 228.8\n-451.1 228.8-386.3 0-452.7-137.5-452.7-228.8 0-92.4 66.4-230 452.7-230z\nm502.4 230c0-111.2-82.4-277.2-502.4-277.2s-504 166-504 277.2\nc0 110 84 276 504 276s502.4-166 502.4-276z",oiiintSize1:"M681.4 71.6c408.9 0 480.5 106.8 480.5 178.2 0 70.8-71.6 177.6\n-480.5 177.6S202.1 320.6 202.1 249.8c0-71.4 70.5-178.2 479.3-178.2z\nm525.8 178.2c0-86.4-86.8-215.4-525.7-215.4-437.9 0-524.7 129-524.7 215.4 0\n85.8 86.8 214.8 524.7 214.8 438.9 0 525.7-129 525.7-214.8z",oiiintSize2:"M1021.2 53c603.6 0 707.8 165.8 707.8 277.2 0 110-104.2 275.8\n-707.8 275.8-606 0-710.2-165.8-710.2-275.8C311 218.8 415.2 53 1021.2 53z\nm770.4 277.1c0-131.2-126.4-327.6-770.5-327.6S248.4 198.9 248.4 330.1\nc0 130 128.8 326.4 772.7 326.4s770.5-196.4 770.5-326.4z",rightarrow:"M0 241v40h399891c-47.3 35.3-84 78-110 128\n-16.7 32-27.7 63.7-33 95 0 1.3-.2 2.7-.5 4-.3 1.3-.5 2.3-.5 3 0 7.3 6.7 11 20\n 11 8 0 13.2-.8 15.5-2.5 2.3-1.7 4.2-5.5 5.5-11.5 2-13.3 5.7-27 11-41 14.7-44.7\n 39-84.5 73-119.5s73.7-60.2 119-75.5c6-2 9-5.7 9-11s-3-9-9-11c-45.3-15.3-85\n-40.5-119-75.5s-58.3-74.8-73-119.5c-4.7-14-8.3-27.3-11-40-1.3-6.7-3.2-10.8-5.5\n-12.5-2.3-1.7-7.5-2.5-15.5-2.5-14 0-21 3.7-21 11 0 2 2 10.3 6 25 20.7 83.3 67\n 151.7 139 205zm0 0v40h399900v-40z",rightbrace:"M400000 542l\n-6 6h-17c-12.7 0-19.3-.3-20-1-4-4-7.3-8.3-10-13-35.3-51.3-80.8-93.8-136.5-127.5\ns-117.2-55.8-184.5-66.5c-.7 0-2-.3-4-1-18.7-2.7-76-4.3-172-5H0V214h399571l6 1\nc124.7 8 235 61.7 331 161 31.3 33.3 59.7 72.7 85 118l7 13v35z",rightbraceunder:"M399994 0l6 6v35l-6 11c-56 104-135.3 181.3-238 232-57.3\n 28.7-117 45-179 50H-300V214h399897c43.3-7 81-15 113-26 100.7-33 179.7-91 237\n-174 2.7-5 6-9 10-13 .7-1 7.3-1 20-1h17z",rightgroup:"M0 80h399565c371 0 266.7 149.4 414 180 5.9 1.2 18 0 18 0 2 0\n 3-1 3-3v-38c-76-158-257-219-435-219H0z",rightgroupunder:"M0 262h399565c371 0 266.7-149.4 414-180 5.9-1.2 18 0 18\n 0 2 0 3 1 3 3v38c-76 158-257 219-435 219H0z",rightharpoon:"M0 241v40h399993c4.7-4.7 7-9.3 7-14 
0-9.3\n-3.7-15.3-11-18-92.7-56.7-159-133.7-199-231-3.3-9.3-6-14.7-8-16-2-1.3-7-2-15-2\n-10.7 0-16.7 2-18 6-2 2.7-1 9.7 3 21 15.3 42 36.7 81.8 64 119.5 27.3 37.7 58\n 69.2 92 94.5zm0 0v40h399900v-40z",rightharpoonplus:"M0 241v40h399993c4.7-4.7 7-9.3 7-14 0-9.3-3.7-15.3-11\n-18-92.7-56.7-159-133.7-199-231-3.3-9.3-6-14.7-8-16-2-1.3-7-2-15-2-10.7 0-16.7\n 2-18 6-2 2.7-1 9.7 3 21 15.3 42 36.7 81.8 64 119.5 27.3 37.7 58 69.2 92 94.5z\nm0 0v40h399900v-40z m100 194v40h399900v-40zm0 0v40h399900v-40z",rightharpoondown:"M399747 511c0 7.3 6.7 11 20 11 8 0 13-.8 15-2.5s4.7-6.8\n 8-15.5c40-94 99.3-166.3 178-217 13.3-8 20.3-12.3 21-13 5.3-3.3 8.5-5.8 9.5\n-7.5 1-1.7 1.5-5.2 1.5-10.5s-2.3-10.3-7-15H0v40h399908c-34 25.3-64.7 57-92 95\n-27.3 38-48.7 77.7-64 119-3.3 8.7-5 14-5 16zM0 241v40h399900v-40z",rightharpoondownplus:"M399747 705c0 7.3 6.7 11 20 11 8 0 13-.8\n 15-2.5s4.7-6.8 8-15.5c40-94 99.3-166.3 178-217 13.3-8 20.3-12.3 21-13 5.3-3.3\n 8.5-5.8 9.5-7.5 1-1.7 1.5-5.2 1.5-10.5s-2.3-10.3-7-15H0v40h399908c-34 25.3\n-64.7 57-92 95-27.3 38-48.7 77.7-64 119-3.3 8.7-5 14-5 16zM0 435v40h399900v-40z\nm0-194v40h400000v-40zm0 0v40h400000v-40z",righthook:"M399859 241c-764 0 0 0 0 0 40-3.3 68.7-15.7 86-37 10-12 15-25.3\n 15-40 0-22.7-9.8-40.7-29.5-54-19.7-13.3-43.5-21-71.5-23-17.3-1.3-26-8-26-20 0\n-13.3 8.7-20 26-20 38 0 71 11.2 99 33.5 0 0 7 5.6 21 16.7 14 11.2 21 33.5 21\n 66.8s-14 61.2-42 83.5c-28 22.3-61 33.5-99 33.5L0 241z M0 281v-40h399859v40z",rightlinesegment:"M399960 241 V94 h40 V428 h-40 V281 H0 v-40z\nM399960 241 V94 h40 V428 h-40 V281 H0 v-40z",rightToFrom:"M400000 167c-70.7-42-118-97.7-142-167h-23c-15.3 0-23 .3-23\n 1 0 1.3 5.3 13.7 16 37 18 35.3 41.3 69 70 101l7 8H0v40h399905l-7 8c-28.7 32\n-52 65.7-70 101-10.7 23.3-16 35.7-16 37 0 .7 7.7 1 23 1h23c24-69.3 71.3-125 142\n-167z M100 147v40h399900v-40zM0 341v40h399900v-40z",twoheadleftarrow:"M0 167c68 40\n 115.7 95.7 143 167h22c15.3 0 23-.3 23-1 0-1.3-5.3-13.7-16-37-18-35.3-41.3-69\n-70-101l-7-8h125l9 7c50.7 39.3 85 86 103 140h46c0-4.7-6.3-18.7-19-42-18-35.3\n-40-67.3-66-96l-9-9h399716v-40H284l9-9c26-28.7 48-60.7 66-96 12.7-23.333 19\n-37.333 19-42h-46c-18 54-52.3 100.7-103 140l-9 7H95l7-8c28.7-32 52-65.7 70-101\n 10.7-23.333 16-35.7 16-37 0-.7-7.7-1-23-1h-22C115.7 71.3 68 127 0 167z",twoheadrightarrow:"M400000 167\nc-68-40-115.7-95.7-143-167h-22c-15.3 0-23 .3-23 1 0 1.3 5.3 13.7 16 37 18 35.3\n 41.3 69 70 101l7 8h-125l-9-7c-50.7-39.3-85-86-103-140h-46c0 4.7 6.3 18.7 19 42\n 18 35.3 40 67.3 66 96l9 9H0v40h399716l-9 9c-26 28.7-48 60.7-66 96-12.7 23.333\n-19 37.333-19 42h46c18-54 52.3-100.7 103-140l9-7h125l-7 8c-28.7 32-52 65.7-70\n 101-10.7 23.333-16 35.7-16 37 0 .7 7.7 1 23 1h22c27.3-71.3 75-127 143-167z",tilde1:"M200 55.538c-77 0-168 73.953-177 73.953-3 0-7\n-2.175-9-5.437L2 97c-1-2-2-4-2-6 0-4 2-7 5-9l20-12C116 12 171 0 207 0c86 0\n 114 68 191 68 78 0 168-68 177-68 4 0 7 2 9 5l12 19c1 2.175 2 4.35 2 6.525 0\n 4.35-2 7.613-5 9.788l-19 13.05c-92 63.077-116.937 75.308-183 76.128\n-68.267.847-113-73.952-191-73.952z",tilde2:"M344 55.266c-142 0-300.638 81.316-311.5 86.418\n-8.01 3.762-22.5 10.91-23.5 5.562L1 120c-1-2-1-3-1-4 0-5 3-9 8-10l18.4-9C160.9\n 31.9 283 0 358 0c148 0 188 122 331 122s314-97 326-97c4 0 8 2 10 7l7 21.114\nc1 2.14 1 3.21 1 4.28 0 5.347-3 9.626-7 10.696l-22.3 12.622C852.6 158.372 751\n 181.476 676 181.476c-149 0-189-126.21-332-126.21z",tilde3:"M786 59C457 59 32 175.242 13 175.242c-6 0-10-3.457\n-11-10.37L.15 138c-1-7 3-12 10-13l19.2-6.4C378.4 40.7 634.3 0 804.3 0c337 0\n 411.8 157 746.8 157 328 0 754-112 773-112 5 0 10 3 11 9l1 14.075c1 
8.066-.697\n 16.595-6.697 17.492l-21.052 7.31c-367.9 98.146-609.15 122.696-778.15 122.696\n -338 0-409-156.573-744-156.573z",tilde4:"M786 58C457 58 32 177.487 13 177.487c-6 0-10-3.345\n-11-10.035L.15 143c-1-7 3-12 10-13l22-6.7C381.2 35 637.15 0 807.15 0c337 0 409\n 177 744 177 328 0 754-127 773-127 5 0 10 3 11 9l1 14.794c1 7.805-3 13.38-9\n 14.495l-20.7 5.574c-366.85 99.79-607.3 139.372-776.3 139.372-338 0-409\n -175.236-744-175.236z",vec:"M377 20c0-5.333 1.833-10 5.5-14S391 0 397 0c4.667 0 8.667 1.667 12 5\n3.333 2.667 6.667 9 10 19 6.667 24.667 20.333 43.667 41 57 7.333 4.667 11\n10.667 11 18 0 6-1 10-3 12s-6.667 5-14 9c-28.667 14.667-53.667 35.667-75 63\n-1.333 1.333-3.167 3.5-5.5 6.5s-4 4.833-5 5.5c-1 .667-2.5 1.333-4.5 2s-4.333 1\n-7 1c-4.667 0-9.167-1.833-13.5-5.5S337 184 337 178c0-12.667 15.667-32.333 47-59\nH213l-171-1c-8.667-6-13-12.333-13-19 0-4.667 4.333-11.333 13-20h359\nc-16-25.333-24-45-24-59z",widehat1:"M529 0h5l519 115c5 1 9 5 9 10 0 1-1 2-1 3l-4 22\nc-1 5-5 9-11 9h-2L532 67 19 159h-2c-5 0-9-4-11-9l-5-22c-1-6 2-12 8-13z",widehat2:"M1181 0h2l1171 176c6 0 10 5 10 11l-2 23c-1 6-5 10\n-11 10h-1L1182 67 15 220h-1c-6 0-10-4-11-10l-2-23c-1-6 4-11 10-11z",widehat3:"M1181 0h2l1171 236c6 0 10 5 10 11l-2 23c-1 6-5 10\n-11 10h-1L1182 67 15 280h-1c-6 0-10-4-11-10l-2-23c-1-6 4-11 10-11z",widehat4:"M1181 0h2l1171 296c6 0 10 5 10 11l-2 23c-1 6-5 10\n-11 10h-1L1182 67 15 340h-1c-6 0-10-4-11-10l-2-23c-1-6 4-11 10-11z",widecheck1:"M529,159h5l519,-115c5,-1,9,-5,9,-10c0,-1,-1,-2,-1,-3l-4,-22c-1,\n-5,-5,-9,-11,-9h-2l-512,92l-513,-92h-2c-5,0,-9,4,-11,9l-5,22c-1,6,2,12,8,13z",widecheck2:"M1181,220h2l1171,-176c6,0,10,-5,10,-11l-2,-23c-1,-6,-5,-10,\n-11,-10h-1l-1168,153l-1167,-153h-1c-6,0,-10,4,-11,10l-2,23c-1,6,4,11,10,11z",widecheck3:"M1181,280h2l1171,-236c6,0,10,-5,10,-11l-2,-23c-1,-6,-5,-10,\n-11,-10h-1l-1168,213l-1167,-213h-1c-6,0,-10,4,-11,10l-2,23c-1,6,4,11,10,11z",widecheck4:"M1181,340h2l1171,-296c6,0,10,-5,10,-11l-2,-23c-1,-6,-5,-10,\n-11,-10h-1l-1168,273l-1167,-273h-1c-6,0,-10,4,-11,10l-2,23c-1,6,4,11,10,11z",baraboveleftarrow:"M400000 620h-399890l3 -3c68.7 -52.7 113.7 -120 135 -202\nc4 -14.7 6 -23 6 -25c0 -7.3 -7 -11 -21 -11c-8 0 -13.2 0.8 -15.5 2.5\nc-2.3 1.7 -4.2 5.8 -5.5 12.5c-1.3 4.7 -2.7 10.3 -4 17c-12 48.7 -34.8 92 -68.5 130\ns-74.2 66.3 -121.5 85c-10 4 -16 7.7 -18 11c0 8.7 6 14.3 18 17c47.3 18.7 87.8 47\n121.5 85s56.5 81.3 68.5 130c0.7 2 1.3 5 2 9s1.2 6.7 1.5 8c0.3 1.3 1 3.3 2 6\ns2.2 4.5 3.5 5.5c1.3 1 3.3 1.8 6 2.5s6 1 10 1c14 0 21 -3.7 21 -11\nc0 -2 -2 -10.3 -6 -25c-20 -79.3 -65 -146.7 -135 -202l-3 -3h399890z\nM100 620v40h399900v-40z M0 241v40h399900v-40zM0 241v40h399900v-40z",rightarrowabovebar:"M0 241v40h399891c-47.3 35.3-84 78-110 128-16.7 32\n-27.7 63.7-33 95 0 1.3-.2 2.7-.5 4-.3 1.3-.5 2.3-.5 3 0 7.3 6.7 11 20 11 8 0\n13.2-.8 15.5-2.5 2.3-1.7 4.2-5.5 5.5-11.5 2-13.3 5.7-27 11-41 14.7-44.7 39\n-84.5 73-119.5s73.7-60.2 119-75.5c6-2 9-5.7 9-11s-3-9-9-11c-45.3-15.3-85-40.5\n-119-75.5s-58.3-74.8-73-119.5c-4.7-14-8.3-27.3-11-40-1.3-6.7-3.2-10.8-5.5\n-12.5-2.3-1.7-7.5-2.5-15.5-2.5-14 0-21 3.7-21 11 0 2 2 10.3 6 25 20.7 83.3 67\n151.7 139 205zm96 379h399894v40H0zm0 0h399904v40H0z",baraboveshortleftharpoon:"M507,435c-4,4,-6.3,8.7,-7,14c0,5.3,0.7,9,2,11\nc1.3,2,5.3,5.3,12,10c90.7,54,156,130,196,228c3.3,10.7,6.3,16.3,9,17\nc2,0.7,5,1,9,1c0,0,5,0,5,0c10.7,0,16.7,-2,18,-6c2,-2.7,1,-9.7,-3,-21\nc-32,-87.3,-82.7,-157.7,-152,-211c0,0,-3,-3,-3,-3l399351,0l0,-40\nc-398570,0,-399437,0,-399437,0z M593 435 v40 H399500 v-40z\nM0 281 v-40 H399908 v40z M0 281 v-40 H399908 
v40z",rightharpoonaboveshortbar:"M0,241 l0,40c399126,0,399993,0,399993,0\nc4.7,-4.7,7,-9.3,7,-14c0,-9.3,-3.7,-15.3,-11,-18c-92.7,-56.7,-159,-133.7,-199,\n-231c-3.3,-9.3,-6,-14.7,-8,-16c-2,-1.3,-7,-2,-15,-2c-10.7,0,-16.7,2,-18,6\nc-2,2.7,-1,9.7,3,21c15.3,42,36.7,81.8,64,119.5c27.3,37.7,58,69.2,92,94.5z\nM0 241 v40 H399908 v-40z M0 475 v-40 H399500 v40z M0 475 v-40 H399500 v40z",shortbaraboveleftharpoon:"M7,435c-4,4,-6.3,8.7,-7,14c0,5.3,0.7,9,2,11\nc1.3,2,5.3,5.3,12,10c90.7,54,156,130,196,228c3.3,10.7,6.3,16.3,9,17c2,0.7,5,1,9,\n1c0,0,5,0,5,0c10.7,0,16.7,-2,18,-6c2,-2.7,1,-9.7,-3,-21c-32,-87.3,-82.7,-157.7,\n-152,-211c0,0,-3,-3,-3,-3l399907,0l0,-40c-399126,0,-399993,0,-399993,0z\nM93 435 v40 H400000 v-40z M500 241 v40 H400000 v-40z M500 241 v40 H400000 v-40z",shortrightharpoonabovebar:"M53,241l0,40c398570,0,399437,0,399437,0\nc4.7,-4.7,7,-9.3,7,-14c0,-9.3,-3.7,-15.3,-11,-18c-92.7,-56.7,-159,-133.7,-199,\n-231c-3.3,-9.3,-6,-14.7,-8,-16c-2,-1.3,-7,-2,-15,-2c-10.7,0,-16.7,2,-18,6\nc-2,2.7,-1,9.7,3,21c15.3,42,36.7,81.8,64,119.5c27.3,37.7,58,69.2,92,94.5z\nM500 241 v40 H399408 v-40z M500 435 v40 H400000 v-40z"},A=function(){function e(e){this.children=void 0,this.classes=void 0,this.height=void 0,this.depth=void 0,this.maxFontSize=void 0,this.style=void 0,this.children=e,this.classes=[],this.height=0,this.depth=0,this.maxFontSize=0,this.style={}}var t=e.prototype;return t.hasClass=function(e){return l.contains(this.classes,e)},t.toNode=function(){for(var e=document.createDocumentFragment(),t=0;t=5?0:e>=3?1:2]){var r=N[t]={cssEmPerMu:B.quad[t]/18};for(var n in B)B.hasOwnProperty(n)&&(r[n]=B[n][t])}return N[t]}(this.size)),this._fontMetrics},t.getColor=function(){return this.phantom?"transparent":this.color},e}();H.BASESIZE=6;var E=H,L={pt:1,mm:7227/2540,cm:7227/254,in:72.27,bp:1.00375,pc:12,dd:1238/1157,cc:14856/1157,nd:685/642,nc:1370/107,sp:1/65536,px:1.00375},D={ex:!0,em:!0,mu:!0},P=function(e){return"string"!=typeof e&&(e=e.unit),e in L||e in D||"ex"===e},F=function(e,t){var r;if(e.unit in L)r=L[e.unit]/t.fontMetrics().ptPerEm/t.sizeMultiplier;else if("mu"===e.unit)r=t.fontMetrics().cssEmPerMu;else{var a;if(a=t.style.isTight()?t.havingStyle(t.style.text()):t,"ex"===e.unit)r=a.fontMetrics().xHeight;else{if("em"!==e.unit)throw new n("Invalid unit: '"+e.unit+"'");r=a.fontMetrics().quad}a!==t&&(r*=a.sizeMultiplier/t.sizeMultiplier)}return Math.min(e.number*r,t.maxSize)},V=function(e){return+e.toFixed(4)+"em"},G=function(e){return e.filter((function(e){return e})).join(" ")},U=function(e,t,r){if(this.classes=e||[],this.attributes={},this.height=0,this.depth=0,this.maxFontSize=0,this.style=r||{},t){t.style.isTight()&&this.classes.push("mtight");var n=t.getColor();n&&(this.style.color=n)}},Y=function(e){var t=document.createElement(e);for(var r in t.className=G(this.classes),this.style)this.style.hasOwnProperty(r)&&(t.style[r]=this.style[r]);for(var n in this.attributes)this.attributes.hasOwnProperty(n)&&t.setAttribute(n,this.attributes[n]);for(var a=0;a"},W=function(){function e(e,t,r,n){this.children=void 0,this.attributes=void 0,this.classes=void 0,this.height=void 0,this.depth=void 0,this.width=void 0,this.maxFontSize=void 0,this.style=void 0,U.call(this,e,r,n),this.children=t||[]}var t=e.prototype;return t.setAttribute=function(e,t){this.attributes[e]=t},t.hasClass=function(e){return l.contains(this.classes,e)},t.toNode=function(){return Y.call(this,"span")},t.toMarkup=function(){return X.call(this,"span")},e}(),_=function(){function e(e,t,r,n){this.children=void 0,this.attributes=void 
0,this.classes=void 0,this.height=void 0,this.depth=void 0,this.maxFontSize=void 0,this.style=void 0,U.call(this,t,n),this.children=r||[],this.setAttribute("href",e)}var t=e.prototype;return t.setAttribute=function(e,t){this.attributes[e]=t},t.hasClass=function(e){return l.contains(this.classes,e)},t.toNode=function(){return Y.call(this,"a")},t.toMarkup=function(){return X.call(this,"a")},e}(),j=function(){function e(e,t,r){this.src=void 0,this.alt=void 0,this.classes=void 0,this.height=void 0,this.depth=void 0,this.maxFontSize=void 0,this.style=void 0,this.alt=t,this.src=e,this.classes=["mord"],this.style=r}var t=e.prototype;return t.hasClass=function(e){return l.contains(this.classes,e)},t.toNode=function(){var e=document.createElement("img");for(var t in e.src=this.src,e.alt=this.alt,e.className="mord",this.style)this.style.hasOwnProperty(t)&&(e.style[t]=this.style[t]);return e},t.toMarkup=function(){var e=""+this.alt+"=a[0]&&e<=a[1])return r.name}return null}(this.text.charCodeAt(0));l&&this.classes.push(l+"_fallback"),/[\xee\xef\xed\xec]/.test(this.text)&&(this.text=$[this.text])}var t=e.prototype;return t.hasClass=function(e){return l.contains(this.classes,e)},t.toNode=function(){var e=document.createTextNode(this.text),t=null;for(var r in this.italic>0&&((t=document.createElement("span")).style.marginRight=V(this.italic)),this.classes.length>0&&((t=t||document.createElement("span")).className=G(this.classes)),this.style)this.style.hasOwnProperty(r)&&((t=t||document.createElement("span")).style[r]=this.style[r]);return t?(t.appendChild(e),t):e},t.toMarkup=function(){var e=!1,t="0&&(r+="margin-right:"+this.italic+"em;"),this.style)this.style.hasOwnProperty(n)&&(r+=l.hyphenate(n)+":"+this.style[n]+";");r&&(e=!0,t+=' style="'+l.escape(r)+'"');var a=l.escape(this.text);return e?(t+=">",t+=a,t+=""):a},e}(),K=function(){function e(e,t){this.children=void 0,this.attributes=void 0,this.children=e||[],this.attributes=t||{}}var t=e.prototype;return t.toNode=function(){var e=document.createElementNS("http://www.w3.org/2000/svg","svg");for(var t in this.attributes)Object.prototype.hasOwnProperty.call(this.attributes,t)&&e.setAttribute(t,this.attributes[t]);for(var r=0;r":""},e}(),Q=function(){function e(e){this.attributes=void 0,this.attributes=e||{}}var t=e.prototype;return t.toNode=function(){var e=document.createElementNS("http://www.w3.org/2000/svg","line");for(var t in this.attributes)Object.prototype.hasOwnProperty.call(this.attributes,t)&&e.setAttribute(t,this.attributes[t]);return e},t.toMarkup=function(){var e="","\\gt",!0),ie(oe,le,be,"\u2208","\\in",!0),ie(oe,le,be,"\ue020","\\@not"),ie(oe,le,be,"\u2282","\\subset",!0),ie(oe,le,be,"\u2283","\\supset",!0),ie(oe,le,be,"\u2286","\\subseteq",!0),ie(oe,le,be,"\u2287","\\supseteq",!0),ie(oe,he,be,"\u2288","\\nsubseteq",!0),ie(oe,he,be,"\u2289","\\nsupseteq",!0),ie(oe,le,be,"\u22a8","\\models"),ie(oe,le,be,"\u2190","\\leftarrow",!0),ie(oe,le,be,"\u2264","\\le"),ie(oe,le,be,"\u2264","\\leq",!0),ie(oe,le,be,"<","\\lt",!0),ie(oe,le,be,"\u2192","\\rightarrow",!0),ie(oe,le,be,"\u2192","\\to"),ie(oe,he,be,"\u2271","\\ngeq",!0),ie(oe,he,be,"\u2270","\\nleq",!0),ie(oe,le,ye,"\xa0","\\ "),ie(oe,le,ye,"\xa0","\\space"),ie(oe,le,ye,"\xa0","\\nobreakspace"),ie(se,le,ye,"\xa0","\\ "),ie(se,le,ye,"\xa0"," 
"),ie(se,le,ye,"\xa0","\\space"),ie(se,le,ye,"\xa0","\\nobreakspace"),ie(oe,le,ye,null,"\\nobreak"),ie(oe,le,ye,null,"\\allowbreak"),ie(oe,le,ve,",",","),ie(oe,le,ve,";",";"),ie(oe,he,ce,"\u22bc","\\barwedge",!0),ie(oe,he,ce,"\u22bb","\\veebar",!0),ie(oe,le,ce,"\u2299","\\odot",!0),ie(oe,le,ce,"\u2295","\\oplus",!0),ie(oe,le,ce,"\u2297","\\otimes",!0),ie(oe,le,xe,"\u2202","\\partial",!0),ie(oe,le,ce,"\u2298","\\oslash",!0),ie(oe,he,ce,"\u229a","\\circledcirc",!0),ie(oe,he,ce,"\u22a1","\\boxdot",!0),ie(oe,le,ce,"\u25b3","\\bigtriangleup"),ie(oe,le,ce,"\u25bd","\\bigtriangledown"),ie(oe,le,ce,"\u2020","\\dagger"),ie(oe,le,ce,"\u22c4","\\diamond"),ie(oe,le,ce,"\u22c6","\\star"),ie(oe,le,ce,"\u25c3","\\triangleleft"),ie(oe,le,ce,"\u25b9","\\triangleright"),ie(oe,le,ge,"{","\\{"),ie(se,le,xe,"{","\\{"),ie(se,le,xe,"{","\\textbraceleft"),ie(oe,le,ue,"}","\\}"),ie(se,le,xe,"}","\\}"),ie(se,le,xe,"}","\\textbraceright"),ie(oe,le,ge,"{","\\lbrace"),ie(oe,le,ue,"}","\\rbrace"),ie(oe,le,ge,"[","\\lbrack",!0),ie(se,le,xe,"[","\\lbrack",!0),ie(oe,le,ue,"]","\\rbrack",!0),ie(se,le,xe,"]","\\rbrack",!0),ie(oe,le,ge,"(","\\lparen",!0),ie(oe,le,ue,")","\\rparen",!0),ie(se,le,xe,"<","\\textless",!0),ie(se,le,xe,">","\\textgreater",!0),ie(oe,le,ge,"\u230a","\\lfloor",!0),ie(oe,le,ue,"\u230b","\\rfloor",!0),ie(oe,le,ge,"\u2308","\\lceil",!0),ie(oe,le,ue,"\u2309","\\rceil",!0),ie(oe,le,xe,"\\","\\backslash"),ie(oe,le,xe,"\u2223","|"),ie(oe,le,xe,"\u2223","\\vert"),ie(se,le,xe,"|","\\textbar",!0),ie(oe,le,xe,"\u2225","\\|"),ie(oe,le,xe,"\u2225","\\Vert"),ie(se,le,xe,"\u2225","\\textbardbl"),ie(se,le,xe,"~","\\textasciitilde"),ie(se,le,xe,"\\","\\textbackslash"),ie(se,le,xe,"^","\\textasciicircum"),ie(oe,le,be,"\u2191","\\uparrow",!0),ie(oe,le,be,"\u21d1","\\Uparrow",!0),ie(oe,le,be,"\u2193","\\downarrow",!0),ie(oe,le,be,"\u21d3","\\Downarrow",!0),ie(oe,le,be,"\u2195","\\updownarrow",!0),ie(oe,le,be,"\u21d5","\\Updownarrow",!0),ie(oe,le,fe,"\u2210","\\coprod"),ie(oe,le,fe,"\u22c1","\\bigvee"),ie(oe,le,fe,"\u22c0","\\bigwedge"),ie(oe,le,fe,"\u2a04","\\biguplus"),ie(oe,le,fe,"\u22c2","\\bigcap"),ie(oe,le,fe,"\u22c3","\\bigcup"),ie(oe,le,fe,"\u222b","\\int"),ie(oe,le,fe,"\u222b","\\intop"),ie(oe,le,fe,"\u222c","\\iint"),ie(oe,le,fe,"\u222d","\\iiint"),ie(oe,le,fe,"\u220f","\\prod"),ie(oe,le,fe,"\u2211","\\sum"),ie(oe,le,fe,"\u2a02","\\bigotimes"),ie(oe,le,fe,"\u2a01","\\bigoplus"),ie(oe,le,fe,"\u2a00","\\bigodot"),ie(oe,le,fe,"\u222e","\\oint"),ie(oe,le,fe,"\u222f","\\oiint"),ie(oe,le,fe,"\u2230","\\oiiint"),ie(oe,le,fe,"\u2a06","\\bigsqcup"),ie(oe,le,fe,"\u222b","\\smallint"),ie(se,le,pe,"\u2026","\\textellipsis"),ie(oe,le,pe,"\u2026","\\mathellipsis"),ie(se,le,pe,"\u2026","\\ldots",!0),ie(oe,le,pe,"\u2026","\\ldots",!0),ie(oe,le,pe,"\u22ef","\\@cdots",!0),ie(oe,le,pe,"\u22f1","\\ddots",!0),ie(oe,le,xe,"\u22ee","\\varvdots"),ie(oe,le,me,"\u02ca","\\acute"),ie(oe,le,me,"\u02cb","\\grave"),ie(oe,le,me,"\xa8","\\ddot"),ie(oe,le,me,"~","\\tilde"),ie(oe,le,me,"\u02c9","\\bar"),ie(oe,le,me,"\u02d8","\\breve"),ie(oe,le,me,"\u02c7","\\check"),ie(oe,le,me,"^","\\hat"),ie(oe,le,me,"\u20d7","\\vec"),ie(oe,le,me,"\u02d9","\\dot"),ie(oe,le,me,"\u02da","\\mathring"),ie(oe,le,de,"\ue131","\\@imath"),ie(oe,le,de,"\ue237","\\@jmath"),ie(oe,le,xe,"\u0131","\u0131"),ie(oe,le,xe,"\u0237","\u0237"),ie(se,le,xe,"\u0131","\\i",!0),ie(se,le,xe,"\u0237","\\j",!0),ie(se,le,xe,"\xdf","\\ss",!0),ie(se,le,xe,"\xe6","\\ae",!0),ie(se,le,xe,"\u0153","\\oe",!0),ie(se,le,xe,"\xf8","\\o",!0),ie(se,le,xe,"\xc6","\\AE",!0),ie(se,le,xe,"\u0152","\\OE",
!0),ie(se,le,xe,"\xd8","\\O",!0),ie(se,le,me,"\u02ca","\\'"),ie(se,le,me,"\u02cb","\\`"),ie(se,le,me,"\u02c6","\\^"),ie(se,le,me,"\u02dc","\\~"),ie(se,le,me,"\u02c9","\\="),ie(se,le,me,"\u02d8","\\u"),ie(se,le,me,"\u02d9","\\."),ie(se,le,me,"\xb8","\\c"),ie(se,le,me,"\u02da","\\r"),ie(se,le,me,"\u02c7","\\v"),ie(se,le,me,"\xa8",'\\"'),ie(se,le,me,"\u02dd","\\H"),ie(se,le,me,"\u25ef","\\textcircled");var we={"--":!0,"---":!0,"``":!0,"''":!0};ie(se,le,xe,"\u2013","--",!0),ie(se,le,xe,"\u2013","\\textendash"),ie(se,le,xe,"\u2014","---",!0),ie(se,le,xe,"\u2014","\\textemdash"),ie(se,le,xe,"\u2018","`",!0),ie(se,le,xe,"\u2018","\\textquoteleft"),ie(se,le,xe,"\u2019","'",!0),ie(se,le,xe,"\u2019","\\textquoteright"),ie(se,le,xe,"\u201c","``",!0),ie(se,le,xe,"\u201c","\\textquotedblleft"),ie(se,le,xe,"\u201d","''",!0),ie(se,le,xe,"\u201d","\\textquotedblright"),ie(oe,le,xe,"\xb0","\\degree",!0),ie(se,le,xe,"\xb0","\\degree"),ie(se,le,xe,"\xb0","\\textdegree",!0),ie(oe,le,xe,"\xa3","\\pounds"),ie(oe,le,xe,"\xa3","\\mathsterling",!0),ie(se,le,xe,"\xa3","\\pounds"),ie(se,le,xe,"\xa3","\\textsterling",!0),ie(oe,he,xe,"\u2720","\\maltese"),ie(se,he,xe,"\u2720","\\maltese");for(var ke='0123456789/@."',Se=0;Set&&(t=i.height),i.depth>r&&(r=i.depth),i.maxFontSize>n&&(n=i.maxFontSize)}e.height=t,e.depth=r,e.maxFontSize=n},Xe=function(e,t,r,n){var a=new W(e,t,r,n);return Ye(a),a},We=function(e,t,r,n){return new W(e,t,r,n)},_e=function(e){var t=new A(e);return Ye(t),t},je=function(e,t,r){var n="";switch(e){case"amsrm":n="AMS";break;case"textrm":n="Main";break;case"textsf":n="SansSerif";break;case"texttt":n="Typewriter";break;default:n=e}return n+"-"+("textbf"===t&&"textit"===r?"BoldItalic":"textbf"===t?"Bold":"textit"===t?"Italic":"Regular")},$e={mathbf:{variant:"bold",fontName:"Main-Bold"},mathrm:{variant:"normal",fontName:"Main-Regular"},textit:{variant:"italic",fontName:"Main-Italic"},mathit:{variant:"italic",fontName:"Main-Italic"},mathnormal:{variant:"italic",fontName:"Math-Italic"},mathbb:{variant:"double-struck",fontName:"AMS-Regular"},mathcal:{variant:"script",fontName:"Caligraphic-Regular"},mathfrak:{variant:"fraktur",fontName:"Fraktur-Regular"},mathscr:{variant:"script",fontName:"Script-Regular"},mathsf:{variant:"sans-serif",fontName:"SansSerif-Regular"},mathtt:{variant:"monospace",fontName:"Typewriter-Regular"}},Ze={vec:["vec",.471,.714],oiintSize1:["oiintSize1",.957,.499],oiintSize2:["oiintSize2",1.472,.659],oiiintSize1:["oiiintSize1",1.304,.499],oiiintSize2:["oiiintSize2",1.98,.659]},Ke={fontMap:$e,makeSymbol:Ge,mathsym:function(e,t,r,n){return void 0===n&&(n=[]),"boldsymbol"===r.font&&Ve(e,"Main-Bold",t).metrics?Ge(e,"Main-Bold",t,r,n.concat(["mathbf"])):"\\"===e||"main"===ae[t][e].font?Ge(e,"Main-Regular",t,r,n):Ge(e,"AMS-Regular",t,r,n.concat(["amsrm"]))},makeSpan:Xe,makeSvgSpan:We,makeLineSpan:function(e,t,r){var n=Xe([e],[],t);return n.height=Math.max(r||t.fontMetrics().defaultRuleThickness,t.minRuleThickness),n.style.borderBottomWidth=V(n.height),n.maxFontSize=1,n},makeAnchor:function(e,t,r,n){var a=new _(e,t,r,n);return Ye(a),a},makeFragment:_e,wrapFragment:function(e,t){return e instanceof A?Xe([],[e],t):e},makeVList:function(e,t){for(var r=function(e){if("individualShift"===e.positionType){for(var t=e.children,r=[t[0]],n=-t[0].shift-t[0].elem.depth,a=n,i=1;i0&&(o.push(kt(s,t)),s=[]),o.push(a[l]));s.length>0&&o.push(kt(s,t)),r?((i=kt(ft(r,t,!0))).classes=["tag"],o.push(i)):n&&o.push(n);var m=mt(["katex-html"],o);if(m.setAttribute("aria-hidden","true"),i){var 
c=i.children[0];c.style.height=V(m.height+m.depth),m.depth&&(c.style.verticalAlign=V(-m.depth))}return m}function Mt(e){return new A(e)}var zt=function(){function e(e,t,r){this.type=void 0,this.attributes=void 0,this.children=void 0,this.classes=void 0,this.type=e,this.attributes={},this.children=t||[],this.classes=r||[]}var t=e.prototype;return t.setAttribute=function(e,t){this.attributes[e]=t},t.getAttribute=function(e){return this.attributes[e]},t.toNode=function(){var e=document.createElementNS("http://www.w3.org/1998/Math/MathML",this.type);for(var t in this.attributes)Object.prototype.hasOwnProperty.call(this.attributes,t)&&e.setAttribute(t,this.attributes[t]);this.classes.length>0&&(e.className=G(this.classes));for(var r=0;r0&&(e+=' class ="'+l.escape(G(this.classes))+'"'),e+=">";for(var r=0;r"},t.toText=function(){return this.children.map((function(e){return e.toText()})).join("")},e}(),At=function(){function e(e){this.text=void 0,this.text=e}var t=e.prototype;return t.toNode=function(){return document.createTextNode(this.text)},t.toMarkup=function(){return l.escape(this.toText())},t.toText=function(){return this.text},e}(),Tt={MathNode:zt,TextNode:At,SpaceNode:function(){function e(e){this.width=void 0,this.character=void 0,this.width=e,this.character=e>=.05555&&e<=.05556?"\u200a":e>=.1666&&e<=.1667?"\u2009":e>=.2222&&e<=.2223?"\u2005":e>=.2777&&e<=.2778?"\u2005\u200a":e>=-.05556&&e<=-.05555?"\u200a\u2063":e>=-.1667&&e<=-.1666?"\u2009\u2063":e>=-.2223&&e<=-.2222?"\u205f\u2063":e>=-.2778&&e<=-.2777?"\u2005\u2063":null}var t=e.prototype;return t.toNode=function(){if(this.character)return document.createTextNode(this.character);var e=document.createElementNS("http://www.w3.org/1998/Math/MathML","mspace");return e.setAttribute("width",V(this.width)),e},t.toMarkup=function(){return this.character?""+this.character+"":''},t.toText=function(){return this.character?this.character:" "},e}(),newDocumentFragment:Mt},Bt=function(e,t,r){return!ae[t][e]||!ae[t][e].replace||55349===e.charCodeAt(0)||we.hasOwnProperty(e)&&r&&(r.fontFamily&&"tt"===r.fontFamily.substr(4,2)||r.font&&"tt"===r.font.substr(4,2))||(e=ae[t][e].replace),new Tt.TextNode(e)},Ct=function(e){return 1===e.length?e[0]:new Tt.MathNode("mrow",e)},qt=function(e,t){if("texttt"===t.fontFamily)return"monospace";if("textsf"===t.fontFamily)return"textit"===t.fontShape&&"textbf"===t.fontWeight?"sans-serif-bold-italic":"textit"===t.fontShape?"sans-serif-italic":"textbf"===t.fontWeight?"bold-sans-serif":"sans-serif";if("textit"===t.fontShape&&"textbf"===t.fontWeight)return"bold-italic";if("textit"===t.fontShape)return"italic";if("textbf"===t.fontWeight)return"bold";var r=t.font;if(!r||"mathnormal"===r)return null;var n=e.mode;if("mathit"===r)return"italic";if("boldsymbol"===r)return"textord"===e.type?"bold":"bold-italic";if("mathbf"===r)return"bold";if("mathbb"===r)return"double-struck";if("mathfrak"===r)return"fraktur";if("mathscr"===r||"mathcal"===r)return"script";if("mathsf"===r)return"sans-serif";if("mathtt"===r)return"monospace";var a=e.text;return l.contains(["\\imath","\\jmath"],a)?null:(ae[n][a]&&ae[n][a].replace&&(a=ae[n][a].replace),q(a,Ke.fontMap[r].fontName,n)?Ke.fontMap[r].variant:null)},Nt=function(e,t,r){if(1===e.length){var n=Rt(e[0],t);return r&&n instanceof zt&&"mo"===n.type&&(n.setAttribute("lspace","0em"),n.setAttribute("rspace","0em")),[n]}for(var a,i=[],o=0;o0&&(p.text=p.text.slice(0,1)+"\u0338"+p.text.slice(1),i.pop())}}}i.push(s),a=s}return i},It=function(e,t,r){return Ct(Nt(e,t,r))},Rt=function(e,t){if(!e)return 
new Tt.MathNode("mrow");if(it[e.type])return it[e.type](e,t);throw new n("Got group of unknown type: '"+e.type+"'")};function Ot(e,t,r,n,a){var i,o=Nt(e,r);i=1===o.length&&o[0]instanceof zt&&l.contains(["mrow","mtable"],o[0].type)?o[0]:new Tt.MathNode("mrow",o);var s=new Tt.MathNode("annotation",[new Tt.TextNode(t)]);s.setAttribute("encoding","application/x-tex");var h=new Tt.MathNode("semantics",[i,s]),m=new Tt.MathNode("math",[h]);m.setAttribute("xmlns","http://www.w3.org/1998/Math/MathML"),n&&m.setAttribute("display","block");var c=a?"katex":"katex-mathml";return Ke.makeSpan([c],[m])}var Ht=function(e){return new E({style:e.displayMode?x.DISPLAY:x.TEXT,maxSize:e.maxSize,minRuleThickness:e.minRuleThickness})},Et=function(e,t){if(t.displayMode){var r=["katex-display"];t.leqno&&r.push("leqno"),t.fleqn&&r.push("fleqn"),e=Ke.makeSpan(r,[e])}return e},Lt=function(e,t,r){var n,a=Ht(r);if("mathml"===r.output)return Ot(e,t,a,r.displayMode,!0);if("html"===r.output){var i=St(e,a);n=Ke.makeSpan(["katex"],[i])}else{var o=Ot(e,t,a,r.displayMode,!1),s=St(e,a);n=Ke.makeSpan(["katex"],[o,s])}return Et(n,r)},Dt={widehat:"^",widecheck:"\u02c7",widetilde:"~",utilde:"~",overleftarrow:"\u2190",underleftarrow:"\u2190",xleftarrow:"\u2190",overrightarrow:"\u2192",underrightarrow:"\u2192",xrightarrow:"\u2192",underbrace:"\u23df",overbrace:"\u23de",overgroup:"\u23e0",undergroup:"\u23e1",overleftrightarrow:"\u2194",underleftrightarrow:"\u2194",xleftrightarrow:"\u2194",Overrightarrow:"\u21d2",xRightarrow:"\u21d2",overleftharpoon:"\u21bc",xleftharpoonup:"\u21bc",overrightharpoon:"\u21c0",xrightharpoonup:"\u21c0",xLeftarrow:"\u21d0",xLeftrightarrow:"\u21d4",xhookleftarrow:"\u21a9",xhookrightarrow:"\u21aa",xmapsto:"\u21a6",xrightharpoondown:"\u21c1",xleftharpoondown:"\u21bd",xrightleftharpoons:"\u21cc",xleftrightharpoons:"\u21cb",xtwoheadleftarrow:"\u219e",xtwoheadrightarrow:"\u21a0",xlongequal:"=",xtofrom:"\u21c4",xrightleftarrows:"\u21c4",xrightequilibrium:"\u21cc",xleftequilibrium:"\u21cb","\\cdrightarrow":"\u2192","\\cdleftarrow":"\u2190","\\cdlongequal":"="},Pt={overrightarrow:[["rightarrow"],.888,522,"xMaxYMin"],overleftarrow:[["leftarrow"],.888,522,"xMinYMin"],underrightarrow:[["rightarrow"],.888,522,"xMaxYMin"],underleftarrow:[["leftarrow"],.888,522,"xMinYMin"],xrightarrow:[["rightarrow"],1.469,522,"xMaxYMin"],"\\cdrightarrow":[["rightarrow"],3,522,"xMaxYMin"],xleftarrow:[["leftarrow"],1.469,522,"xMinYMin"],"\\cdleftarrow":[["leftarrow"],3,522,"xMinYMin"],Overrightarrow:[["doublerightarrow"],.888,560,"xMaxYMin"],xRightarrow:[["doublerightarrow"],1.526,560,"xMaxYMin"],xLeftarrow:[["doubleleftarrow"],1.526,560,"xMinYMin"],overleftharpoon:[["leftharpoon"],.888,522,"xMinYMin"],xleftharpoonup:[["leftharpoon"],.888,522,"xMinYMin"],xleftharpoondown:[["leftharpoondown"],.888,522,"xMinYMin"],overrightharpoon:[["rightharpoon"],.888,522,"xMaxYMin"],xrightharpoonup:[["rightharpoon"],.888,522,"xMaxYMin"],xrightharpoondown:[["rightharpoondown"],.888,522,"xMaxYMin"],xlongequal:[["longequal"],.888,334,"xMinYMin"],"\\cdlongequal":[["longequal"],3,334,"xMinYMin"],xtwoheadleftarrow:[["twoheadleftarrow"],.888,334,"xMinYMin"],xtwoheadrightarrow:[["twoheadrightarrow"],.888,334,"xMaxYMin"],overleftrightarrow:[["leftarrow","rightarrow"],.888,522],overbrace:[["leftbrace","midbrace","rightbrace"],1.6,548],underbrace:[["leftbraceunder","midbraceunder","rightbraceunder"],1.6,548],underleftrightarrow:[["leftarrow","rightarrow"],.888,522],xleftrightarrow:[["leftarrow","rightarrow"],1.75,522],xLeftrightarrow:[["doubleleftarrow","doublerighta
rrow"],1.75,560],xrightleftharpoons:[["leftharpoondownplus","rightharpoonplus"],1.75,716],xleftrightharpoons:[["leftharpoonplus","rightharpoondownplus"],1.75,716],xhookleftarrow:[["leftarrow","righthook"],1.08,522],xhookrightarrow:[["lefthook","rightarrow"],1.08,522],overlinesegment:[["leftlinesegment","rightlinesegment"],.888,522],underlinesegment:[["leftlinesegment","rightlinesegment"],.888,522],overgroup:[["leftgroup","rightgroup"],.888,342],undergroup:[["leftgroupunder","rightgroupunder"],.888,342],xmapsto:[["leftmapsto","rightarrow"],1.5,522],xtofrom:[["leftToFrom","rightToFrom"],1.75,528],xrightleftarrows:[["baraboveleftarrow","rightarrowabovebar"],1.75,901],xrightequilibrium:[["baraboveshortleftharpoon","rightharpoonaboveshortbar"],1.75,716],xleftequilibrium:[["shortbaraboveleftharpoon","shortrightharpoonabovebar"],1.75,716]},Ft=function(e,t,r,n,a){var i,o=e.height+e.depth+r+n;if(/fbox|color|angl/.test(t)){if(i=Ke.makeSpan(["stretchy",t],[],a),"fbox"===t){var s=a.color&&a.getColor();s&&(i.style.borderColor=s)}}else{var l=[];/^[bx]cancel$/.test(t)&&l.push(new Q({x1:"0",y1:"0",x2:"100%",y2:"100%","stroke-width":"0.046em"})),/^x?cancel$/.test(t)&&l.push(new Q({x1:"0",y1:"100%",x2:"100%",y2:"0","stroke-width":"0.046em"}));var h=new K(l,{width:"100%",height:V(o)});i=Ke.makeSvgSpan([],[h],a)}return i.height=o,i.style.height=V(o),i},Vt=function(e){var t=new Tt.MathNode("mo",[new Tt.TextNode(Dt[e.replace(/^\\/,"")])]);return t.setAttribute("stretchy","true"),t},Gt=function(e,t){var r=function(){var r=4e5,n=e.label.substr(1);if(l.contains(["widehat","widecheck","widetilde","utilde"],n)){var a,i,o,s="ordgroup"===(d=e.base).type?d.body.length:1;if(s>5)"widehat"===n||"widecheck"===n?(a=420,r=2364,o=.42,i=n+"4"):(a=312,r=2340,o=.34,i="tilde4");else{var h=[1,1,2,2,3,3][s];"widehat"===n||"widecheck"===n?(r=[0,1062,2364,2364,2364][h],a=[0,239,300,360,420][h],o=[0,.24,.3,.3,.36,.42][h],i=n+h):(r=[0,600,1033,2339,2340][h],a=[0,260,286,306,312][h],o=[0,.26,.286,.3,.306,.34][h],i="tilde"+h)}var m=new J(i),c=new K([m],{width:"100%",height:V(o),viewBox:"0 0 "+r+" "+a,preserveAspectRatio:"none"});return{span:Ke.makeSvgSpan([],[c],t),minWidth:0,height:o}}var u,p,d,f=[],g=Pt[n],v=g[0],b=g[1],y=g[2],x=y/1e3,w=v.length;if(1===w)u=["hide-tail"],p=[g[3]];else if(2===w)u=["halfarrow-left","halfarrow-right"],p=["xMinYMin","xMaxYMin"];else{if(3!==w)throw new Error("Correct katexImagesData or update code here to support\n "+w+" children.");u=["brace-left","brace-center","brace-right"],p=["xMinYMin","xMidYMin","xMaxYMin"]}for(var k=0;k0&&(n.style.minWidth=V(a)),n};function Ut(e,t){if(!e||e.type!==t)throw new Error("Expected node of type "+t+", but got "+(e?"node of type "+e.type:String(e)));return e}function Yt(e){var t=Xt(e);if(!t)throw new Error("Expected node of symbol group type, but got "+(e?"node of type "+e.type:String(e)));return t}function Xt(e){return e&&("atom"===e.type||re.hasOwnProperty(e.type))?e:null}var Wt=function(e,t){var r,n,a;e&&"supsub"===e.type?(r=(n=Ut(e.base,"accent")).base,e.base=r,a=function(e){if(e instanceof W)return e;throw new Error("Expected span but got "+String(e)+".")}(wt(e,t)),e.base=n):r=(n=Ut(e,"accent")).base;var i=wt(r,t.havingCrampedStyle()),o=0;if(n.isShifty&&l.isCharacterBox(r)){var s=l.getBaseElem(r);o=ee(wt(s,t.havingCrampedStyle())).skew}var 
h,m="\\c"===n.label,c=m?i.height+i.depth:Math.min(i.height,t.fontMetrics().xHeight);if(n.isStretchy)h=Gt(n,t),h=Ke.makeVList({positionType:"firstBaseline",children:[{type:"elem",elem:i},{type:"elem",elem:h,wrapperClasses:["svg-align"],wrapperStyle:o>0?{width:"calc(100% - "+V(2*o)+")",marginLeft:V(2*o)}:void 0}]},t);else{var u,p;"\\vec"===n.label?(u=Ke.staticSvg("vec",t),p=Ke.svgData.vec[1]):((u=ee(u=Ke.makeOrd({mode:n.mode,text:n.label},t,"textord"))).italic=0,p=u.width,m&&(c+=u.depth)),h=Ke.makeSpan(["accent-body"],[u]);var d="\\textcircled"===n.label;d&&(h.classes.push("accent-full"),c=i.height);var f=o;d||(f-=p/2),h.style.left=V(f),"\\textcircled"===n.label&&(h.style.top=".2em"),h=Ke.makeVList({positionType:"firstBaseline",children:[{type:"elem",elem:i},{type:"kern",size:-c},{type:"elem",elem:h}]},t)}var g=Ke.makeSpan(["mord","accent"],[h],t);return a?(a.children[0]=g,a.height=Math.max(g.height,a.height),a.classes[0]="mord",a):g},_t=function(e,t){var r=e.isStretchy?Vt(e.label):new Tt.MathNode("mo",[Bt(e.label,e.mode)]),n=new Tt.MathNode("mover",[Rt(e.base,t),r]);return n.setAttribute("accent","true"),n},jt=new RegExp(["\\acute","\\grave","\\ddot","\\tilde","\\bar","\\breve","\\check","\\hat","\\vec","\\dot","\\mathring"].map((function(e){return"\\"+e})).join("|"));ot({type:"accent",names:["\\acute","\\grave","\\ddot","\\tilde","\\bar","\\breve","\\check","\\hat","\\vec","\\dot","\\mathring","\\widecheck","\\widehat","\\widetilde","\\overrightarrow","\\overleftarrow","\\Overrightarrow","\\overleftrightarrow","\\overgroup","\\overlinesegment","\\overleftharpoon","\\overrightharpoon"],props:{numArgs:1},handler:function(e,t){var r=lt(t[0]),n=!jt.test(e.funcName),a=!n||"\\widehat"===e.funcName||"\\widetilde"===e.funcName||"\\widecheck"===e.funcName;return{type:"accent",mode:e.parser.mode,label:e.funcName,isStretchy:n,isShifty:a,base:r}},htmlBuilder:Wt,mathmlBuilder:_t}),ot({type:"accent",names:["\\'","\\`","\\^","\\~","\\=","\\u","\\.",'\\"',"\\c","\\r","\\H","\\v","\\textcircled"],props:{numArgs:1,allowedInText:!0,allowedInMath:!0,argTypes:["primitive"]},handler:function(e,t){var r=t[0],n=e.parser.mode;return"math"===n&&(e.parser.settings.reportNonstrict("mathVsTextAccents","LaTeX's accent "+e.funcName+" works only in text mode"),n="text"),{type:"accent",mode:n,label:e.funcName,isStretchy:!1,isShifty:!0,base:r}},htmlBuilder:Wt,mathmlBuilder:_t}),ot({type:"accentUnder",names:["\\underleftarrow","\\underrightarrow","\\underleftrightarrow","\\undergroup","\\underlinesegment","\\utilde"],props:{numArgs:1},handler:function(e,t){var r=e.parser,n=e.funcName,a=t[0];return{type:"accentUnder",mode:r.mode,label:n,base:a}},htmlBuilder:function(e,t){var r=wt(e.base,t),n=Gt(e,t),a="\\utilde"===e.label?.12:0,i=Ke.makeVList({positionType:"top",positionData:r.height,children:[{type:"elem",elem:n,wrapperClasses:["svg-align"]},{type:"kern",size:a},{type:"elem",elem:r}]},t);return Ke.makeSpan(["mord","accentunder"],[i],t)},mathmlBuilder:function(e,t){var r=Vt(e.label),n=new Tt.MathNode("munder",[Rt(e.base,t),r]);return n.setAttribute("accentunder","true"),n}});var $t=function(e){var t=new Tt.MathNode("mpadded",e?[e]:[]);return 
t.setAttribute("width","+0.6em"),t.setAttribute("lspace","0.3em"),t};ot({type:"xArrow",names:["\\xleftarrow","\\xrightarrow","\\xLeftarrow","\\xRightarrow","\\xleftrightarrow","\\xLeftrightarrow","\\xhookleftarrow","\\xhookrightarrow","\\xmapsto","\\xrightharpoondown","\\xrightharpoonup","\\xleftharpoondown","\\xleftharpoonup","\\xrightleftharpoons","\\xleftrightharpoons","\\xlongequal","\\xtwoheadrightarrow","\\xtwoheadleftarrow","\\xtofrom","\\xrightleftarrows","\\xrightequilibrium","\\xleftequilibrium","\\\\cdrightarrow","\\\\cdleftarrow","\\\\cdlongequal"],props:{numArgs:1,numOptionalArgs:1},handler:function(e,t,r){var n=e.parser,a=e.funcName;return{type:"xArrow",mode:n.mode,label:a,body:t[0],below:r[0]}},htmlBuilder:function(e,t){var r,n=t.style,a=t.havingStyle(n.sup()),i=Ke.wrapFragment(wt(e.body,a,t),t),o="\\x"===e.label.slice(0,2)?"x":"cd";i.classes.push(o+"-arrow-pad"),e.below&&(a=t.havingStyle(n.sub()),(r=Ke.wrapFragment(wt(e.below,a,t),t)).classes.push(o+"-arrow-pad"));var s,l=Gt(e,t),h=-t.fontMetrics().axisHeight+.5*l.height,m=-t.fontMetrics().axisHeight-.5*l.height-.111;if((i.depth>.25||"\\xleftequilibrium"===e.label)&&(m-=i.depth),r){var c=-t.fontMetrics().axisHeight+r.height+.5*l.height+.111;s=Ke.makeVList({positionType:"individualShift",children:[{type:"elem",elem:i,shift:m},{type:"elem",elem:l,shift:h},{type:"elem",elem:r,shift:c}]},t)}else s=Ke.makeVList({positionType:"individualShift",children:[{type:"elem",elem:i,shift:m},{type:"elem",elem:l,shift:h}]},t);return s.children[0].children[0].children[1].classes.push("svg-align"),Ke.makeSpan(["mrel","x-arrow"],[s],t)},mathmlBuilder:function(e,t){var r,n=Vt(e.label);if(n.setAttribute("minsize","x"===e.label.charAt(0)?"1.75em":"3.0em"),e.body){var a=$t(Rt(e.body,t));if(e.below){var i=$t(Rt(e.below,t));r=new Tt.MathNode("munderover",[n,i,a])}else r=new Tt.MathNode("mover",[n,a])}else if(e.below){var o=$t(Rt(e.below,t));r=new Tt.MathNode("munder",[n,o])}else r=$t(),r=new Tt.MathNode("mover",[n,r]);return r}});var Zt={">":"\\\\cdrightarrow","<":"\\\\cdleftarrow","=":"\\\\cdlongequal",A:"\\uparrow",V:"\\downarrow","|":"\\Vert",".":"no arrow"},Kt=function(e){return"textord"===e.type&&"@"===e.text};function Jt(e,t,r){var n=Zt[e];switch(n){case"\\\\cdrightarrow":case"\\\\cdleftarrow":return r.callFunction(n,[t[0]],[t[1]]);case"\\uparrow":case"\\downarrow":var a={type:"atom",text:n,mode:"math",family:"rel"},i={type:"ordgroup",mode:"math",body:[r.callFunction("\\\\cdleft",[t[0]],[]),r.callFunction("\\Big",[a],[]),r.callFunction("\\\\cdright",[t[1]],[])]};return r.callFunction("\\\\cdparent",[i],[]);case"\\\\cdlongequal":return r.callFunction("\\\\cdlongequal",[],[]);case"\\Vert":return r.callFunction("\\Big",[{type:"textord",text:"\\Vert",mode:"math"}],[]);default:return{type:"textord",text:" ",mode:"math"}}}ot({type:"cdlabel",names:["\\\\cdleft","\\\\cdright"],props:{numArgs:1},handler:function(e,t){var r=e.parser,n=e.funcName;return{type:"cdlabel",mode:r.mode,side:n.slice(4),label:t[0]}},htmlBuilder:function(e,t){var r=t.havingStyle(t.style.sup()),n=Ke.wrapFragment(wt(e.label,r,t),t);return n.classes.push("cd-label-"+e.side),n.style.bottom=V(.8-n.depth),n.height=0,n.depth=0,n},mathmlBuilder:function(e,t){var r=new Tt.MathNode("mrow",[Rt(e.label,t)]);return(r=new Tt.MathNode("mpadded",[r])).setAttribute("width","0"),"left"===e.side&&r.setAttribute("lspace","-1width"),r.setAttribute("voffset","0.7em"),(r=new 
Tt.MathNode("mstyle",[r])).setAttribute("displaystyle","false"),r.setAttribute("scriptlevel","1"),r}}),ot({type:"cdlabelparent",names:["\\\\cdparent"],props:{numArgs:1},handler:function(e,t){return{type:"cdlabelparent",mode:e.parser.mode,fragment:t[0]}},htmlBuilder:function(e,t){var r=Ke.wrapFragment(wt(e.fragment,t),t);return r.classes.push("cd-vert-arrow"),r},mathmlBuilder:function(e,t){return new Tt.MathNode("mrow",[Rt(e.fragment,t)])}}),ot({type:"textord",names:["\\@char"],props:{numArgs:1,allowedInText:!0},handler:function(e,t){for(var r=e.parser,a=Ut(t[0],"ordgroup").body,i="",o=0;o=1114111)throw new n("\\@char with invalid code point "+i);return l<=65535?s=String.fromCharCode(l):(l-=65536,s=String.fromCharCode(55296+(l>>10),56320+(1023&l))),{type:"textord",mode:r.mode,text:s}}});var Qt=function(e,t){var r=ft(e.body,t.withColor(e.color),!1);return Ke.makeFragment(r)},er=function(e,t){var r=Nt(e.body,t.withColor(e.color)),n=new Tt.MathNode("mstyle",r);return n.setAttribute("mathcolor",e.color),n};ot({type:"color",names:["\\textcolor"],props:{numArgs:2,allowedInText:!0,argTypes:["color","original"]},handler:function(e,t){var r=e.parser,n=Ut(t[0],"color-token").color,a=t[1];return{type:"color",mode:r.mode,color:n,body:ht(a)}},htmlBuilder:Qt,mathmlBuilder:er}),ot({type:"color",names:["\\color"],props:{numArgs:1,allowedInText:!0,argTypes:["color"]},handler:function(e,t){var r=e.parser,n=e.breakOnTokenText,a=Ut(t[0],"color-token").color;r.gullet.macros.set("\\current@color",a);var i=r.parseExpression(!0,n);return{type:"color",mode:r.mode,color:a,body:i}},htmlBuilder:Qt,mathmlBuilder:er}),ot({type:"cr",names:["\\\\"],props:{numArgs:0,numOptionalArgs:1,argTypes:["size"],allowedInText:!0},handler:function(e,t,r){var n=e.parser,a=r[0],i=!n.settings.displayMode||!n.settings.useStrictBehavior("newLineInDisplayMode","In LaTeX, \\\\ or \\newline does nothing in display mode");return{type:"cr",mode:n.mode,newLine:i,size:a&&Ut(a,"size").value}},htmlBuilder:function(e,t){var r=Ke.makeSpan(["mspace"],[],t);return e.newLine&&(r.classes.push("newline"),e.size&&(r.style.marginTop=V(F(e.size,t)))),r},mathmlBuilder:function(e,t){var r=new Tt.MathNode("mspace");return e.newLine&&(r.setAttribute("linebreak","newline"),e.size&&r.setAttribute("height",V(F(e.size,t)))),r}});var tr={"\\global":"\\global","\\long":"\\\\globallong","\\\\globallong":"\\\\globallong","\\def":"\\gdef","\\gdef":"\\gdef","\\edef":"\\xdef","\\xdef":"\\xdef","\\let":"\\\\globallet","\\futurelet":"\\\\globalfuture"},rr=function(e){var t=e.text;if(/^(?:[\\{}$&#^_]|EOF)$/.test(t))throw new n("Expected a control sequence",e);return t},nr=function(e,t,r,n){var a=e.gullet.macros.get(r.text);null==a&&(r.noexpand=!0,a={tokens:[r],numArgs:0,unexpandable:!e.gullet.isExpandable(r.text)}),e.gullet.macros.set(t,a,n)};ot({type:"internal",names:["\\global","\\long","\\\\globallong"],props:{numArgs:0,allowedInText:!0},handler:function(e){var t=e.parser,r=e.funcName;t.consumeSpaces();var a=t.fetch();if(tr[a.text])return"\\global"!==r&&"\\\\globallong"!==r||(a.text=tr[a.text]),Ut(t.parseFunction(),"internal");throw new n("Invalid token after macro prefix",a)}}),ot({type:"internal",names:["\\def","\\gdef","\\edef","\\xdef"],props:{numArgs:0,allowedInText:!0,primitive:!0},handler:function(e){var t=e.parser,r=e.funcName,a=t.gullet.popToken(),i=a.text;if(/^(?:[\\{}$&#^_]|EOF)$/.test(i))throw new n("Expected a control sequence",a);for(var 
o,s=0,l=[[]];"{"!==t.gullet.future().text;)if("#"===(a=t.gullet.popToken()).text){if("{"===t.gullet.future().text){o=t.gullet.future(),l[s].push("{");break}if(a=t.gullet.popToken(),!/^[1-9]$/.test(a.text))throw new n('Invalid argument number "'+a.text+'"');if(parseInt(a.text)!==s+1)throw new n('Argument number "'+a.text+'" out of order');s++,l.push([])}else{if("EOF"===a.text)throw new n("Expected a macro definition");l[s].push(a.text)}var h=t.gullet.consumeArg().tokens;return o&&h.unshift(o),"\\edef"!==r&&"\\xdef"!==r||(h=t.gullet.expandTokens(h)).reverse(),t.gullet.macros.set(i,{tokens:h,numArgs:s,delimiters:l},r===tr[r]),{type:"internal",mode:t.mode}}}),ot({type:"internal",names:["\\let","\\\\globallet"],props:{numArgs:0,allowedInText:!0,primitive:!0},handler:function(e){var t=e.parser,r=e.funcName,n=rr(t.gullet.popToken());t.gullet.consumeSpaces();var a=function(e){var t=e.gullet.popToken();return"="===t.text&&" "===(t=e.gullet.popToken()).text&&(t=e.gullet.popToken()),t}(t);return nr(t,n,a,"\\\\globallet"===r),{type:"internal",mode:t.mode}}}),ot({type:"internal",names:["\\futurelet","\\\\globalfuture"],props:{numArgs:0,allowedInText:!0,primitive:!0},handler:function(e){var t=e.parser,r=e.funcName,n=rr(t.gullet.popToken()),a=t.gullet.popToken(),i=t.gullet.popToken();return nr(t,n,i,"\\\\globalfuture"===r),t.gullet.pushToken(i),t.gullet.pushToken(a),{type:"internal",mode:t.mode}}});var ar=function(e,t,r){var n=q(ae.math[e]&&ae.math[e].replace||e,t,r);if(!n)throw new Error("Unsupported symbol "+e+" and font size "+t+".");return n},ir=function(e,t,r,n){var a=r.havingBaseStyle(t),i=Ke.makeSpan(n.concat(a.sizingClasses(r)),[e],r),o=a.sizeMultiplier/r.sizeMultiplier;return i.height*=o,i.depth*=o,i.maxFontSize=a.sizeMultiplier,i},or=function(e,t,r){var n=t.havingBaseStyle(r),a=(1-t.sizeMultiplier/n.sizeMultiplier)*t.fontMetrics().axisHeight;e.classes.push("delimcenter"),e.style.top=V(a),e.height-=a,e.depth+=a},sr=function(e,t,r,n,a,i){var o=function(e,t,r,n){return Ke.makeSymbol(e,"Size"+t+"-Regular",r,n)}(e,t,a,n),s=ir(Ke.makeSpan(["delimsizing","size"+t],[o],n),x.TEXT,n,i);return r&&or(s,n,x.TEXT),s},lr=function(e,t,r){var n;return n="Size1-Regular"===t?"delim-size1":"delim-size4",{type:"elem",elem:Ke.makeSpan(["delimsizinginner",n],[Ke.makeSpan([],[Ke.makeSymbol(e,t,r)])])}},hr=function(e,t,r){var n=T["Size4-Regular"][e.charCodeAt(0)]?T["Size4-Regular"][e.charCodeAt(0)][4]:T["Size1-Regular"][e.charCodeAt(0)][4],a=new J("inner",function(e,t){switch(e){case"\u239c":return"M291 0 H417 V"+t+" H291z M291 0 H417 V"+t+" H291z";case"\u2223":return"M145 0 H188 V"+t+" H145z M145 0 H188 V"+t+" H145z";case"\u2225":return"M145 0 H188 V"+t+" H145z M145 0 H188 V"+t+" H145zM367 0 H410 V"+t+" H367z M367 0 H410 V"+t+" H367z";case"\u239f":return"M457 0 H583 V"+t+" H457z M457 0 H583 V"+t+" H457z";case"\u23a2":return"M319 0 H403 V"+t+" H319z M319 0 H403 V"+t+" H319z";case"\u23a5":return"M263 0 H347 V"+t+" H263z M263 0 H347 V"+t+" H263z";case"\u23aa":return"M384 0 H504 V"+t+" H384z M384 0 H504 V"+t+" H384z";case"\u23d0":return"M312 0 H355 V"+t+" H312z M312 0 H355 V"+t+" H312z";case"\u2016":return"M257 0 H300 V"+t+" H257z M257 0 H300 V"+t+" H257zM478 0 H521 V"+t+" H478z M478 0 H521 V"+t+" H478z";default:return""}}(e,Math.round(1e3*t))),i=new K([a],{width:V(n),height:V(t),style:"width:"+V(n),viewBox:"0 0 "+1e3*n+" "+Math.round(1e3*t),preserveAspectRatio:"xMinYMin"}),o=Ke.makeSvgSpan([],[i],r);return 
o.height=t,o.style.height=V(t),o.style.width=V(n),{type:"elem",elem:o}},mr={type:"kern",size:-.008},cr=["|","\\lvert","\\rvert","\\vert"],ur=["\\|","\\lVert","\\rVert","\\Vert"],pr=function(e,t,r,n,a,i){var o,s,h,m;o=h=m=e,s=null;var c="Size1-Regular";"\\uparrow"===e?h=m="\u23d0":"\\Uparrow"===e?h=m="\u2016":"\\downarrow"===e?o=h="\u23d0":"\\Downarrow"===e?o=h="\u2016":"\\updownarrow"===e?(o="\\uparrow",h="\u23d0",m="\\downarrow"):"\\Updownarrow"===e?(o="\\Uparrow",h="\u2016",m="\\Downarrow"):l.contains(cr,e)?h="\u2223":l.contains(ur,e)?h="\u2225":"["===e||"\\lbrack"===e?(o="\u23a1",h="\u23a2",m="\u23a3",c="Size4-Regular"):"]"===e||"\\rbrack"===e?(o="\u23a4",h="\u23a5",m="\u23a6",c="Size4-Regular"):"\\lfloor"===e||"\u230a"===e?(h=o="\u23a2",m="\u23a3",c="Size4-Regular"):"\\lceil"===e||"\u2308"===e?(o="\u23a1",h=m="\u23a2",c="Size4-Regular"):"\\rfloor"===e||"\u230b"===e?(h=o="\u23a5",m="\u23a6",c="Size4-Regular"):"\\rceil"===e||"\u2309"===e?(o="\u23a4",h=m="\u23a5",c="Size4-Regular"):"("===e||"\\lparen"===e?(o="\u239b",h="\u239c",m="\u239d",c="Size4-Regular"):")"===e||"\\rparen"===e?(o="\u239e",h="\u239f",m="\u23a0",c="Size4-Regular"):"\\{"===e||"\\lbrace"===e?(o="\u23a7",s="\u23a8",m="\u23a9",h="\u23aa",c="Size4-Regular"):"\\}"===e||"\\rbrace"===e?(o="\u23ab",s="\u23ac",m="\u23ad",h="\u23aa",c="Size4-Regular"):"\\lgroup"===e||"\u27ee"===e?(o="\u23a7",m="\u23a9",h="\u23aa",c="Size4-Regular"):"\\rgroup"===e||"\u27ef"===e?(o="\u23ab",m="\u23ad",h="\u23aa",c="Size4-Regular"):"\\lmoustache"===e||"\u23b0"===e?(o="\u23a7",m="\u23ad",h="\u23aa",c="Size4-Regular"):"\\rmoustache"!==e&&"\u23b1"!==e||(o="\u23ab",m="\u23a9",h="\u23aa",c="Size4-Regular");var u=ar(o,c,a),p=u.height+u.depth,d=ar(h,c,a),f=d.height+d.depth,g=ar(m,c,a),v=g.height+g.depth,b=0,y=1;if(null!==s){var w=ar(s,c,a);b=w.height+w.depth,y=2}var k=p+v+b,S=k+Math.max(0,Math.ceil((t-k)/(y*f)))*y*f,M=n.fontMetrics().axisHeight;r&&(M*=n.sizeMultiplier);var z=S/2-M,A=[];if(A.push(lr(m,c,a)),A.push(mr),null===s){var T=S-p-v+.016;A.push(hr(h,T,n))}else{var B=(S-p-v-b)/2+.016;A.push(hr(h,B,n)),A.push(mr),A.push(lr(s,c,a)),A.push(mr),A.push(hr(h,B,n))}A.push(mr),A.push(lr(o,c,a));var C=n.havingBaseStyle(x.TEXT),q=Ke.makeVList({positionType:"bottom",positionData:z,children:A},C);return ir(Ke.makeSpan(["delimsizing","mult"],[q],C),x.TEXT,n,i)},dr=.08,fr=function(e,t,r,n,a){var i=function(e,t,r){t*=1e3;var n="";switch(e){case"sqrtMain":n=function(e,t){return"M95,"+(622+e+t)+"\nc-2.7,0,-7.17,-2.7,-13.5,-8c-5.8,-5.3,-9.5,-10,-9.5,-14\nc0,-2,0.3,-3.3,1,-4c1.3,-2.7,23.83,-20.7,67.5,-54\nc44.2,-33.3,65.8,-50.3,66.5,-51c1.3,-1.3,3,-2,5,-2c4.7,0,8.7,3.3,12,10\ns173,378,173,378c0.7,0,35.3,-71,104,-213c68.7,-142,137.5,-285,206.5,-429\nc69,-144,104.5,-217.7,106.5,-221\nl"+e/2.075+" -"+e+"\nc5.3,-9.3,12,-14,20,-14\nH400000v"+(40+e)+"H845.2724\ns-225.272,467,-225.272,467s-235,486,-235,486c-2.7,4.7,-9,7,-19,7\nc-6,0,-10,-1,-12,-3s-194,-422,-194,-422s-65,47,-65,47z\nM"+(834+e)+" "+t+"h400000v"+(40+e)+"h-400000z"}(t,M);break;case"sqrtSize1":n=function(e,t){return"M263,"+(601+e+t)+"c0.7,0,18,39.7,52,119\nc34,79.3,68.167,158.7,102.5,238c34.3,79.3,51.8,119.3,52.5,120\nc340,-704.7,510.7,-1060.3,512,-1067\nl"+e/2.084+" 
-"+e+"\nc4.7,-7.3,11,-11,19,-11\nH40000v"+(40+e)+"H1012.3\ns-271.3,567,-271.3,567c-38.7,80.7,-84,175,-136,283c-52,108,-89.167,185.3,-111.5,232\nc-22.3,46.7,-33.8,70.3,-34.5,71c-4.7,4.7,-12.3,7,-23,7s-12,-1,-12,-1\ns-109,-253,-109,-253c-72.7,-168,-109.3,-252,-110,-252c-10.7,8,-22,16.7,-34,26\nc-22,17.3,-33.3,26,-34,26s-26,-26,-26,-26s76,-59,76,-59s76,-60,76,-60z\nM"+(1001+e)+" "+t+"h400000v"+(40+e)+"h-400000z"}(t,M);break;case"sqrtSize2":n=function(e,t){return"M983 "+(10+e+t)+"\nl"+e/3.13+" -"+e+"\nc4,-6.7,10,-10,18,-10 H400000v"+(40+e)+"\nH1013.1s-83.4,268,-264.1,840c-180.7,572,-277,876.3,-289,913c-4.7,4.7,-12.7,7,-24,7\ns-12,0,-12,0c-1.3,-3.3,-3.7,-11.7,-7,-25c-35.3,-125.3,-106.7,-373.3,-214,-744\nc-10,12,-21,25,-33,39s-32,39,-32,39c-6,-5.3,-15,-14,-27,-26s25,-30,25,-30\nc26.7,-32.7,52,-63,76,-91s52,-60,52,-60s208,722,208,722\nc56,-175.3,126.3,-397.3,211,-666c84.7,-268.7,153.8,-488.2,207.5,-658.5\nc53.7,-170.3,84.5,-266.8,92.5,-289.5z\nM"+(1001+e)+" "+t+"h400000v"+(40+e)+"h-400000z"}(t,M);break;case"sqrtSize3":n=function(e,t){return"M424,"+(2398+e+t)+"\nc-1.3,-0.7,-38.5,-172,-111.5,-514c-73,-342,-109.8,-513.3,-110.5,-514\nc0,-2,-10.7,14.3,-32,49c-4.7,7.3,-9.8,15.7,-15.5,25c-5.7,9.3,-9.8,16,-12.5,20\ns-5,7,-5,7c-4,-3.3,-8.3,-7.7,-13,-13s-13,-13,-13,-13s76,-122,76,-122s77,-121,77,-121\ns209,968,209,968c0,-2,84.7,-361.7,254,-1079c169.3,-717.3,254.7,-1077.7,256,-1081\nl"+e/4.223+" -"+e+"c4,-6.7,10,-10,18,-10 H400000\nv"+(40+e)+"H1014.6\ns-87.3,378.7,-272.6,1166c-185.3,787.3,-279.3,1182.3,-282,1185\nc-2,6,-10,9,-24,9\nc-8,0,-12,-0.7,-12,-2z M"+(1001+e)+" "+t+"\nh400000v"+(40+e)+"h-400000z"}(t,M);break;case"sqrtSize4":n=function(e,t){return"M473,"+(2713+e+t)+"\nc339.3,-1799.3,509.3,-2700,510,-2702 l"+e/5.298+" -"+e+"\nc3.3,-7.3,9.3,-11,18,-11 H400000v"+(40+e)+"H1017.7\ns-90.5,478,-276.2,1466c-185.7,988,-279.5,1483,-281.5,1485c-2,6,-10,9,-24,9\nc-8,0,-12,-0.7,-12,-2c0,-1.3,-5.3,-32,-16,-92c-50.7,-293.3,-119.7,-693.3,-207,-1200\nc0,-1.3,-5.3,8.7,-16,30c-10.7,21.3,-21.3,42.7,-32,64s-16,33,-16,33s-26,-26,-26,-26\ns76,-153,76,-153s77,-151,77,-151c0.7,0.7,35.7,202,105,604c67.3,400.7,102,602.7,104,\n606zM"+(1001+e)+" "+t+"h400000v"+(40+e)+"H1017.7z"}(t,M);break;case"sqrtTall":n=function(e,t,r){return"M702 "+(e+t)+"H400000"+(40+e)+"\nH742v"+(r-54-t-e)+"l-4 4-4 4c-.667.7 -2 1.5-4 2.5s-4.167 1.833-6.5 2.5-5.5 1-9.5 1\nh-12l-28-84c-16.667-52-96.667 -294.333-240-727l-212 -643 -85 170\nc-4-3.333-8.333-7.667-13 -13l-13-13l77-155 77-156c66 199.333 139 419.667\n219 661 l218 661zM702 "+t+"H400000v"+(40+e)+"H742z"}(t,M,r)}return n}(e,n,r),o=new J(e,i),s=new K([o],{width:"400em",height:V(t),viewBox:"0 0 400000 "+r,preserveAspectRatio:"xMinYMin slice"});return 
Ke.makeSvgSpan(["hide-tail"],[s],a)},gr=["(","\\lparen",")","\\rparen","[","\\lbrack","]","\\rbrack","\\{","\\lbrace","\\}","\\rbrace","\\lfloor","\\rfloor","\u230a","\u230b","\\lceil","\\rceil","\u2308","\u2309","\\surd"],vr=["\\uparrow","\\downarrow","\\updownarrow","\\Uparrow","\\Downarrow","\\Updownarrow","|","\\|","\\vert","\\Vert","\\lvert","\\rvert","\\lVert","\\rVert","\\lgroup","\\rgroup","\u27ee","\u27ef","\\lmoustache","\\rmoustache","\u23b0","\u23b1"],br=["<",">","\\langle","\\rangle","/","\\backslash","\\lt","\\gt"],yr=[0,1.2,1.8,2.4,3],xr=[{type:"small",style:x.SCRIPTSCRIPT},{type:"small",style:x.SCRIPT},{type:"small",style:x.TEXT},{type:"large",size:1},{type:"large",size:2},{type:"large",size:3},{type:"large",size:4}],wr=[{type:"small",style:x.SCRIPTSCRIPT},{type:"small",style:x.SCRIPT},{type:"small",style:x.TEXT},{type:"stack"}],kr=[{type:"small",style:x.SCRIPTSCRIPT},{type:"small",style:x.SCRIPT},{type:"small",style:x.TEXT},{type:"large",size:1},{type:"large",size:2},{type:"large",size:3},{type:"large",size:4},{type:"stack"}],Sr=function(e){if("small"===e.type)return"Main-Regular";if("large"===e.type)return"Size"+e.size+"-Regular";if("stack"===e.type)return"Size4-Regular";throw new Error("Add support for delim type '"+e.type+"' here.")},Mr=function(e,t,r,n){for(var a=Math.min(2,3-n.style.size);at)return r[a]}return r[r.length-1]},zr=function(e,t,r,n,a,i){var o;"<"===e||"\\lt"===e||"\u27e8"===e?e="\\langle":">"!==e&&"\\gt"!==e&&"\u27e9"!==e||(e="\\rangle"),o=l.contains(br,e)?xr:l.contains(gr,e)?kr:wr;var s=Mr(e,t,o,n);return"small"===s.type?function(e,t,r,n,a,i){var o=Ke.makeSymbol(e,"Main-Regular",a,n),s=ir(o,t,n,i);return r&&or(s,n,t),s}(e,s.style,r,n,a,i):"large"===s.type?sr(e,s.size,r,n,a,i):pr(e,t,r,n,a,i)},Ar={sqrtImage:function(e,t){var r,n,a=t.havingBaseSizing(),i=Mr("\\surd",e*a.sizeMultiplier,kr,a),o=a.sizeMultiplier,s=Math.max(0,t.minRuleThickness-t.fontMetrics().sqrtRuleThickness),l=0,h=0,m=0;return"small"===i.type?(e<1?o=1:e<1.4&&(o=.7),h=(1+s)/o,(r=fr("sqrtMain",l=(1+s+dr)/o,m=1e3+1e3*s+80,s,t)).style.minWidth="0.853em",n=.833/o):"large"===i.type?(m=1080*yr[i.size],h=(yr[i.size]+s)/o,l=(yr[i.size]+s+dr)/o,(r=fr("sqrtSize"+i.size,l,m,s,t)).style.minWidth="1.02em",n=1/o):(l=e+s+dr,h=e+s,m=Math.floor(1e3*e+s)+80,(r=fr("sqrtTall",l,m,s,t)).style.minWidth="0.742em",n=1.056),r.height=h,r.style.height=V(l),{span:r,advanceWidth:n,ruleWidth:(t.fontMetrics().sqrtRuleThickness+s)*o}},sizedDelim:function(e,t,r,a,i){if("<"===e||"\\lt"===e||"\u27e8"===e?e="\\langle":">"!==e&&"\\gt"!==e&&"\u27e9"!==e||(e="\\rangle"),l.contains(gr,e)||l.contains(br,e))return sr(e,t,!1,r,a,i);if(l.contains(vr,e))return pr(e,yr[t],!1,r,a,i);throw new n("Illegal delimiter: '"+e+"'")},sizeToMaxHeight:yr,customSizedDelim:zr,leftRightDelim:function(e,t,r,n,a,i){var o=n.fontMetrics().axisHeight*n.sizeMultiplier,s=5/n.fontMetrics().ptPerEm,l=Math.max(t-o,r+o),h=Math.max(l/500*901,2*l-s);return 
zr(e,h,!0,n,a,i)}},Tr={"\\bigl":{mclass:"mopen",size:1},"\\Bigl":{mclass:"mopen",size:2},"\\biggl":{mclass:"mopen",size:3},"\\Biggl":{mclass:"mopen",size:4},"\\bigr":{mclass:"mclose",size:1},"\\Bigr":{mclass:"mclose",size:2},"\\biggr":{mclass:"mclose",size:3},"\\Biggr":{mclass:"mclose",size:4},"\\bigm":{mclass:"mrel",size:1},"\\Bigm":{mclass:"mrel",size:2},"\\biggm":{mclass:"mrel",size:3},"\\Biggm":{mclass:"mrel",size:4},"\\big":{mclass:"mord",size:1},"\\Big":{mclass:"mord",size:2},"\\bigg":{mclass:"mord",size:3},"\\Bigg":{mclass:"mord",size:4}},Br=["(","\\lparen",")","\\rparen","[","\\lbrack","]","\\rbrack","\\{","\\lbrace","\\}","\\rbrace","\\lfloor","\\rfloor","\u230a","\u230b","\\lceil","\\rceil","\u2308","\u2309","<",">","\\langle","\u27e8","\\rangle","\u27e9","\\lt","\\gt","\\lvert","\\rvert","\\lVert","\\rVert","\\lgroup","\\rgroup","\u27ee","\u27ef","\\lmoustache","\\rmoustache","\u23b0","\u23b1","/","\\backslash","|","\\vert","\\|","\\Vert","\\uparrow","\\Uparrow","\\downarrow","\\Downarrow","\\updownarrow","\\Updownarrow","."];function Cr(e,t){var r=Xt(e);if(r&&l.contains(Br,r.text))return r;throw new n(r?"Invalid delimiter '"+r.text+"' after '"+t.funcName+"'":"Invalid delimiter type '"+e.type+"'",e)}function qr(e){if(!e.body)throw new Error("Bug: The leftright ParseNode wasn't fully parsed.")}ot({type:"delimsizing",names:["\\bigl","\\Bigl","\\biggl","\\Biggl","\\bigr","\\Bigr","\\biggr","\\Biggr","\\bigm","\\Bigm","\\biggm","\\Biggm","\\big","\\Big","\\bigg","\\Bigg"],props:{numArgs:1,argTypes:["primitive"]},handler:function(e,t){var r=Cr(t[0],e);return{type:"delimsizing",mode:e.parser.mode,size:Tr[e.funcName].size,mclass:Tr[e.funcName].mclass,delim:r.text}},htmlBuilder:function(e,t){return"."===e.delim?Ke.makeSpan([e.mclass]):Ar.sizedDelim(e.delim,e.size,t,e.mode,[e.mclass])},mathmlBuilder:function(e){var t=[];"."!==e.delim&&t.push(Bt(e.delim,e.mode));var r=new Tt.MathNode("mo",t);"mopen"===e.mclass||"mclose"===e.mclass?r.setAttribute("fence","true"):r.setAttribute("fence","false"),r.setAttribute("stretchy","true");var n=V(Ar.sizeToMaxHeight[e.size]);return r.setAttribute("minsize",n),r.setAttribute("maxsize",n),r}}),ot({type:"leftright-right",names:["\\right"],props:{numArgs:1,primitive:!0},handler:function(e,t){var r=e.parser.gullet.macros.get("\\current@color");if(r&&"string"!=typeof r)throw new n("\\current@color set to non-string in \\right");return{type:"leftright-right",mode:e.parser.mode,delim:Cr(t[0],e).text,color:r}}}),ot({type:"leftright",names:["\\left"],props:{numArgs:1,primitive:!0},handler:function(e,t){var r=Cr(t[0],e),n=e.parser;++n.leftrightDepth;var a=n.parseExpression(!1);--n.leftrightDepth,n.expect("\\right",!1);var i=Ut(n.parseFunction(),"leftright-right");return{type:"leftright",mode:n.mode,body:a,left:r.text,right:i.delim,rightColor:i.color}},htmlBuilder:function(e,t){qr(e);for(var 
r,n,a=ft(e.body,t,!0,["mopen","mclose"]),i=0,o=0,s=!1,l=0;l-1?"mpadded":"menclose",[Rt(e.body,t)]);switch(e.label){case"\\cancel":n.setAttribute("notation","updiagonalstrike");break;case"\\bcancel":n.setAttribute("notation","downdiagonalstrike");break;case"\\phase":n.setAttribute("notation","phasorangle");break;case"\\sout":n.setAttribute("notation","horizontalstrike");break;case"\\fbox":n.setAttribute("notation","box");break;case"\\angl":n.setAttribute("notation","actuarial");break;case"\\fcolorbox":case"\\colorbox":if(r=t.fontMetrics().fboxsep*t.fontMetrics().ptPerEm,n.setAttribute("width","+"+2*r+"pt"),n.setAttribute("height","+"+2*r+"pt"),n.setAttribute("lspace",r+"pt"),n.setAttribute("voffset",r+"pt"),"\\fcolorbox"===e.label){var a=Math.max(t.fontMetrics().fboxrule,t.minRuleThickness);n.setAttribute("style","border: "+a+"em solid "+String(e.borderColor))}break;case"\\xcancel":n.setAttribute("notation","updiagonalstrike downdiagonalstrike")}return e.backgroundColor&&n.setAttribute("mathbackground",e.backgroundColor),n};ot({type:"enclose",names:["\\colorbox"],props:{numArgs:2,allowedInText:!0,argTypes:["color","text"]},handler:function(e,t,r){var n=e.parser,a=e.funcName,i=Ut(t[0],"color-token").color,o=t[1];return{type:"enclose",mode:n.mode,label:a,backgroundColor:i,body:o}},htmlBuilder:Nr,mathmlBuilder:Ir}),ot({type:"enclose",names:["\\fcolorbox"],props:{numArgs:3,allowedInText:!0,argTypes:["color","color","text"]},handler:function(e,t,r){var n=e.parser,a=e.funcName,i=Ut(t[0],"color-token").color,o=Ut(t[1],"color-token").color,s=t[2];return{type:"enclose",mode:n.mode,label:a,backgroundColor:o,borderColor:i,body:s}},htmlBuilder:Nr,mathmlBuilder:Ir}),ot({type:"enclose",names:["\\fbox"],props:{numArgs:1,argTypes:["hbox"],allowedInText:!0},handler:function(e,t){return{type:"enclose",mode:e.parser.mode,label:"\\fbox",body:t[0]}}}),ot({type:"enclose",names:["\\cancel","\\bcancel","\\xcancel","\\sout","\\phase"],props:{numArgs:1},handler:function(e,t){var r=e.parser,n=e.funcName,a=t[0];return{type:"enclose",mode:r.mode,label:n,body:a}},htmlBuilder:Nr,mathmlBuilder:Ir}),ot({type:"enclose",names:["\\angl"],props:{numArgs:1,argTypes:["hbox"],allowedInText:!1},handler:function(e,t){return{type:"enclose",mode:e.parser.mode,label:"\\angl",body:t[0]}}});var Rr={};function Or(e){for(var t=e.type,r=e.names,n=e.props,a=e.handler,i=e.htmlBuilder,o=e.mathmlBuilder,s={type:t,numArgs:n.numArgs||0,allowedInText:!1,numOptionalArgs:0,handler:a},l=0;l1||!c)&&g.pop(),b.length0&&(y+=.25),m.push({pos:y,isDashed:e[t]})}for(w(o[0]),r=0;r0&&(M<(B+=b)&&(M=B),B=0),e.addJot&&(M+=f),z.height=S,z.depth=M,y+=S,z.pos=y,y+=M+B,h[r]=z,w(o[r+1])}var C,q,N=y/2+t.fontMetrics().axisHeight,I=e.cols||[],R=[],O=[];if(e.tags&&e.tags.some((function(e){return e})))for(r=0;r=s)){var W=void 0;(a>0||e.hskipBeforeAndAfter)&&0!==(W=l.deflt(P.pregap,p))&&((C=Ke.makeSpan(["arraycolsep"],[])).style.width=V(W),R.push(C));var _=[];for(r=0;r0){for(var K=Ke.makeLineSpan("hline",t,c),J=Ke.makeLineSpan("hdashline",t,c),Q=[{type:"elem",elem:h,shift:0}];m.length>0;){var ee=m.pop(),te=ee.pos-N;ee.isDashed?Q.push({type:"elem",elem:J,shift:te}):Q.push({type:"elem",elem:K,shift:te})}h=Ke.makeVList({positionType:"individualShift",children:Q},t)}if(0===O.length)return Ke.makeSpan(["mord"],[h],t);var re=Ke.makeVList({positionType:"individualShift",children:O},t);return re=Ke.makeSpan(["tag"],[re],t),Ke.makeFragment([h,re])},Xr={c:"center ",l:"left ",r:"right "},Wr=function(e,t){for(var r=[],n=new Tt.MathNode("mtd",[],["mtr-glue"]),a=new 
Tt.MathNode("mtd",[],["mml-eqn-num"]),i=0;i0){var p=e.cols,d="",f=!1,g=0,v=p.length;"separator"===p[0].type&&(c+="top ",g=1),"separator"===p[p.length-1].type&&(c+="bottom ",v-=1);for(var b=g;b0?"left ":"",c+=S[S.length-1].length>0?"right ":"";for(var M=1;M-1?"alignat":"align",o="split"===e.envName,s=Gr(e.parser,{cols:a,addJot:!0,autoTag:o?void 0:Vr(e.envName),emptySingleRow:!0,colSeparationType:i,maxNumCols:o?2:void 0,leqno:e.parser.settings.leqno},"display"),l=0,h={type:"ordgroup",mode:e.mode,body:[]};if(t[0]&&"ordgroup"===t[0].type){for(var m="",c=0;c0&&u&&(f=1),a[p]={type:"align",align:d,pregap:f,postgap:0}}return s.colSeparationType=u?"align":"alignat",s};Or({type:"array",names:["array","darray"],props:{numArgs:1},handler:function(e,t){var r=(Xt(t[0])?[t[0]]:Ut(t[0],"ordgroup").body).map((function(e){var t=Yt(e).text;if(-1!=="lcr".indexOf(t))return{type:"align",align:t};if("|"===t)return{type:"separator",separator:"|"};if(":"===t)return{type:"separator",separator:":"};throw new n("Unknown column alignment: "+t,e)})),a={cols:r,hskipBeforeAndAfter:!0,maxNumCols:r.length};return Gr(e.parser,a,Ur(e.envName))},htmlBuilder:Yr,mathmlBuilder:Wr}),Or({type:"array",names:["matrix","pmatrix","bmatrix","Bmatrix","vmatrix","Vmatrix","matrix*","pmatrix*","bmatrix*","Bmatrix*","vmatrix*","Vmatrix*"],props:{numArgs:0},handler:function(e){var t={matrix:null,pmatrix:["(",")"],bmatrix:["[","]"],Bmatrix:["\\{","\\}"],vmatrix:["|","|"],Vmatrix:["\\Vert","\\Vert"]}[e.envName.replace("*","")],r="c",a={hskipBeforeAndAfter:!1,cols:[{type:"align",align:r}]};if("*"===e.envName.charAt(e.envName.length-1)){var i=e.parser;if(i.consumeSpaces(),"["===i.fetch().text){if(i.consume(),i.consumeSpaces(),r=i.fetch().text,-1==="lcr".indexOf(r))throw new n("Expected l or c or r",i.nextToken);i.consume(),i.consumeSpaces(),i.expect("]"),i.consume(),a.cols=[{type:"align",align:r}]}}var o=Gr(e.parser,a,Ur(e.envName)),s=Math.max.apply(Math,[0].concat(o.body.map((function(e){return e.length}))));return o.cols=new Array(s).fill({type:"align",align:r}),t?{type:"leftright",mode:e.mode,body:[o],left:t[0],right:t[1],rightColor:void 0}:o},htmlBuilder:Yr,mathmlBuilder:Wr}),Or({type:"array",names:["smallmatrix"],props:{numArgs:0},handler:function(e){var t=Gr(e.parser,{arraystretch:.5},"script");return t.colSeparationType="small",t},htmlBuilder:Yr,mathmlBuilder:Wr}),Or({type:"array",names:["subarray"],props:{numArgs:1},handler:function(e,t){var r=(Xt(t[0])?[t[0]]:Ut(t[0],"ordgroup").body).map((function(e){var t=Yt(e).text;if(-1!=="lc".indexOf(t))return{type:"align",align:t};throw new n("Unknown column alignment: "+t,e)}));if(r.length>1)throw new n("{subarray} can contain only one column");var a={cols:r,hskipBeforeAndAfter:!1,arraystretch:.5};if((a=Gr(e.parser,a,"script")).body.length>0&&a.body[0].length>1)throw new n("{subarray} can contain only one column");return a},htmlBuilder:Yr,mathmlBuilder:Wr}),Or({type:"array",names:["cases","dcases","rcases","drcases"],props:{numArgs:0},handler:function(e){var t=Gr(e.parser,{arraystretch:1.2,cols:[{type:"align",align:"l",pregap:0,postgap:1},{type:"align",align:"l",pregap:0,postgap:0}]},Ur(e.envName));return{type:"leftright",mode:e.mode,body:[t],left:e.envName.indexOf("r")>-1?".":"\\{",right:e.envName.indexOf("r")>-1?"\\}":".",rightColor:void 
0}},htmlBuilder:Yr,mathmlBuilder:Wr}),Or({type:"array",names:["align","align*","aligned","split"],props:{numArgs:0},handler:_r,htmlBuilder:Yr,mathmlBuilder:Wr}),Or({type:"array",names:["gathered","gather","gather*"],props:{numArgs:0},handler:function(e){l.contains(["gather","gather*"],e.envName)&&Fr(e);var t={cols:[{type:"align",align:"c"}],addJot:!0,colSeparationType:"gather",autoTag:Vr(e.envName),emptySingleRow:!0,leqno:e.parser.settings.leqno};return Gr(e.parser,t,"display")},htmlBuilder:Yr,mathmlBuilder:Wr}),Or({type:"array",names:["alignat","alignat*","alignedat"],props:{numArgs:1},handler:_r,htmlBuilder:Yr,mathmlBuilder:Wr}),Or({type:"array",names:["equation","equation*"],props:{numArgs:0},handler:function(e){Fr(e);var t={autoTag:Vr(e.envName),emptySingleRow:!0,singleRow:!0,maxNumCols:1,leqno:e.parser.settings.leqno};return Gr(e.parser,t,"display")},htmlBuilder:Yr,mathmlBuilder:Wr}),Or({type:"array",names:["CD"],props:{numArgs:0},handler:function(e){return Fr(e),function(e){var t=[];for(e.gullet.beginGroup(),e.gullet.macros.set("\\cr","\\\\\\relax"),e.gullet.beginGroup();;){t.push(e.parseExpression(!1,"\\\\")),e.gullet.endGroup(),e.gullet.beginGroup();var r=e.fetch().text;if("&"!==r&&"\\\\"!==r){if("\\end"===r){0===t[t.length-1].length&&t.pop();break}throw new n("Expected \\\\ or \\cr or \\end",e.nextToken)}e.consume()}for(var a,i,o=[],s=[o],l=0;l-1);else{if(!("<>AV".indexOf(u)>-1))throw new n('Expected one of "<>AV=|." after @',h[c]);for(var d=0;d<2;d++){for(var f=!0,g=c+1;g=x.SCRIPT.id?r.text():x.DISPLAY:"text"===e&&r.size===x.DISPLAY.size?r=x.TEXT:"script"===e?r=x.SCRIPT:"scriptscript"===e&&(r=x.SCRIPTSCRIPT),r},nn=function(e,t){var r,n=rn(e.size,t.style),a=n.fracNum(),i=n.fracDen();r=t.havingStyle(a);var o=wt(e.numer,r,t);if(e.continued){var s=8.5/t.fontMetrics().ptPerEm,l=3.5/t.fontMetrics().ptPerEm;o.height=o.height0?3*c:7*c,d=t.fontMetrics().denom1):(m>0?(u=t.fontMetrics().num2,p=c):(u=t.fontMetrics().num3,p=3*c),d=t.fontMetrics().denom2),h){var w=t.fontMetrics().axisHeight;u-o.depth-(w+.5*m)0&&(t="."===(t=e)?null:t),t};ot({type:"genfrac",names:["\\genfrac"],props:{numArgs:6,allowedInArgument:!0,argTypes:["math","math","size","text","math","math"]},handler:function(e,t){var r,n=e.parser,a=t[4],i=t[5],o=lt(t[0]),s="atom"===o.type&&"open"===o.family?sn(o.text):null,l=lt(t[1]),h="atom"===l.type&&"close"===l.family?sn(l.text):null,m=Ut(t[2],"size"),c=null;r=!!m.isBlank||(c=m.value).number>0;var u="auto",p=t[3];if("ordgroup"===p.type){if(p.body.length>0){var d=Ut(p.body[0],"textord");u=on[Number(d.text)]}}else p=Ut(p,"textord"),u=on[Number(p.text)];return{type:"genfrac",mode:n.mode,numer:a,denom:i,continued:!1,hasBarLine:r,barSize:c,leftDelim:s,rightDelim:h,size:u}},htmlBuilder:nn,mathmlBuilder:an}),ot({type:"infix",names:["\\above"],props:{numArgs:1,argTypes:["size"],infix:!0},handler:function(e,t){var r=e.parser,n=(e.funcName,e.token);return{type:"infix",mode:r.mode,replaceWith:"\\\\abovefrac",size:Ut(t[0],"size").value,token:n}}}),ot({type:"genfrac",names:["\\\\abovefrac"],props:{numArgs:3,argTypes:["math","size","math"]},handler:function(e,t){var r=e.parser,n=(e.funcName,t[0]),a=function(e){if(!e)throw new Error("Expected non-null, but got "+String(e));return e}(Ut(t[1],"infix").size),i=t[2],o=a.number>0;return{type:"genfrac",mode:r.mode,numer:n,denom:i,continued:!1,hasBarLine:o,barSize:a,leftDelim:null,rightDelim:null,size:"auto"}},htmlBuilder:nn,mathmlBuilder:an});var ln=function(e,t){var 
r,n,a=t.style;"supsub"===e.type?(r=e.sup?wt(e.sup,t.havingStyle(a.sup()),t):wt(e.sub,t.havingStyle(a.sub()),t),n=Ut(e.base,"horizBrace")):n=Ut(e,"horizBrace");var i,o=wt(n.base,t.havingBaseStyle(x.DISPLAY)),s=Gt(n,t);if(n.isOver?(i=Ke.makeVList({positionType:"firstBaseline",children:[{type:"elem",elem:o},{type:"kern",size:.1},{type:"elem",elem:s}]},t)).children[0].children[0].children[1].classes.push("svg-align"):(i=Ke.makeVList({positionType:"bottom",positionData:o.depth+.1+s.height,children:[{type:"elem",elem:s},{type:"kern",size:.1},{type:"elem",elem:o}]},t)).children[0].children[0].children[0].classes.push("svg-align"),r){var l=Ke.makeSpan(["mord",n.isOver?"mover":"munder"],[i],t);i=n.isOver?Ke.makeVList({positionType:"firstBaseline",children:[{type:"elem",elem:l},{type:"kern",size:.2},{type:"elem",elem:r}]},t):Ke.makeVList({positionType:"bottom",positionData:l.depth+.2+r.height+r.depth,children:[{type:"elem",elem:r},{type:"kern",size:.2},{type:"elem",elem:l}]},t)}return Ke.makeSpan(["mord",n.isOver?"mover":"munder"],[i],t)};ot({type:"horizBrace",names:["\\overbrace","\\underbrace"],props:{numArgs:1},handler:function(e,t){var r=e.parser,n=e.funcName;return{type:"horizBrace",mode:r.mode,label:n,isOver:/^\\over/.test(n),base:t[0]}},htmlBuilder:ln,mathmlBuilder:function(e,t){var r=Vt(e.label);return new Tt.MathNode(e.isOver?"mover":"munder",[Rt(e.base,t),r])}}),ot({type:"href",names:["\\href"],props:{numArgs:2,argTypes:["url","original"],allowedInText:!0},handler:function(e,t){var r=e.parser,n=t[1],a=Ut(t[0],"url").url;return r.settings.isTrusted({command:"\\href",url:a})?{type:"href",mode:r.mode,href:a,body:ht(n)}:r.formatUnsupportedCmd("\\href")},htmlBuilder:function(e,t){var r=ft(e.body,t,!1);return Ke.makeAnchor(e.href,[],r,t)},mathmlBuilder:function(e,t){var r=It(e.body,t);return r instanceof zt||(r=new zt("mrow",[r])),r.setAttribute("href",e.href),r}}),ot({type:"href",names:["\\url"],props:{numArgs:1,argTypes:["url"],allowedInText:!0},handler:function(e,t){var r=e.parser,n=Ut(t[0],"url").url;if(!r.settings.isTrusted({command:"\\url",url:n}))return r.formatUnsupportedCmd("\\url");for(var a=[],i=0;i0&&(n=F(e.totalheight,t)-r);var a=0;e.width.number>0&&(a=F(e.width,t));var i={height:V(r+n)};a>0&&(i.width=V(a)),n>0&&(i.verticalAlign=V(-n));var o=new j(e.src,e.alt,i);return o.height=r,o.depth=n,o},mathmlBuilder:function(e,t){var r=new Tt.MathNode("mglyph",[]);r.setAttribute("alt",e.alt);var n=F(e.height,t),a=0;if(e.totalheight.number>0&&(a=F(e.totalheight,t)-n,r.setAttribute("valign",V(-a))),r.setAttribute("height",V(n+a)),e.width.number>0){var i=F(e.width,t);r.setAttribute("width",V(i))}return r.setAttribute("src",e.src),r}}),ot({type:"kern",names:["\\kern","\\mkern","\\hskip","\\mskip"],props:{numArgs:1,argTypes:["size"],primitive:!0,allowedInText:!0},handler:function(e,t){var r=e.parser,n=e.funcName,a=Ut(t[0],"size");if(r.settings.strict){var i="m"===n[1],o="mu"===a.value.unit;i?(o||r.settings.reportNonstrict("mathVsTextUnits","LaTeX's "+n+" supports only mu units, not "+a.value.unit+" units"),"math"!==r.mode&&r.settings.reportNonstrict("mathVsTextUnits","LaTeX's "+n+" works only in math mode")):o&&r.settings.reportNonstrict("mathVsTextUnits","LaTeX's "+n+" doesn't support mu units")}return{type:"kern",mode:r.mode,dimension:a.value}},htmlBuilder:function(e,t){return Ke.makeGlue(e.dimension,t)},mathmlBuilder:function(e,t){var r=F(e.dimension,t);return new 
Tt.SpaceNode(r)}}),ot({type:"lap",names:["\\mathllap","\\mathrlap","\\mathclap"],props:{numArgs:1,allowedInText:!0},handler:function(e,t){var r=e.parser,n=e.funcName,a=t[0];return{type:"lap",mode:r.mode,alignment:n.slice(5),body:a}},htmlBuilder:function(e,t){var r;"clap"===e.alignment?(r=Ke.makeSpan([],[wt(e.body,t)]),r=Ke.makeSpan(["inner"],[r],t)):r=Ke.makeSpan(["inner"],[wt(e.body,t)]);var n=Ke.makeSpan(["fix"],[]),a=Ke.makeSpan([e.alignment],[r,n],t),i=Ke.makeSpan(["strut"]);return i.style.height=V(a.height+a.depth),a.depth&&(i.style.verticalAlign=V(-a.depth)),a.children.unshift(i),a=Ke.makeSpan(["thinbox"],[a],t),Ke.makeSpan(["mord","vbox"],[a],t)},mathmlBuilder:function(e,t){var r=new Tt.MathNode("mpadded",[Rt(e.body,t)]);if("rlap"!==e.alignment){var n="llap"===e.alignment?"-1":"-0.5";r.setAttribute("lspace",n+"width")}return r.setAttribute("width","0px"),r}}),ot({type:"styling",names:["\\(","$"],props:{numArgs:0,allowedInText:!0,allowedInMath:!1},handler:function(e,t){var r=e.funcName,n=e.parser,a=n.mode;n.switchMode("math");var i="\\("===r?"\\)":"$",o=n.parseExpression(!1,i);return n.expect(i),n.switchMode(a),{type:"styling",mode:n.mode,style:"text",body:o}}}),ot({type:"text",names:["\\)","\\]"],props:{numArgs:0,allowedInText:!0,allowedInMath:!1},handler:function(e,t){throw new n("Mismatched "+e.funcName)}});var mn=function(e,t){switch(t.style.size){case x.DISPLAY.size:return e.display;case x.TEXT.size:return e.text;case x.SCRIPT.size:return e.script;case x.SCRIPTSCRIPT.size:return e.scriptscript;default:return e.text}};ot({type:"mathchoice",names:["\\mathchoice"],props:{numArgs:4,primitive:!0},handler:function(e,t){return{type:"mathchoice",mode:e.parser.mode,display:ht(t[0]),text:ht(t[1]),script:ht(t[2]),scriptscript:ht(t[3])}},htmlBuilder:function(e,t){var r=mn(e,t),n=ft(r,t,!1);return Ke.makeFragment(n)},mathmlBuilder:function(e,t){var r=mn(e,t);return It(r,t)}});var cn=function(e,t,r,n,a,i,o){e=Ke.makeSpan([],[e]);var s,h,m,c=r&&l.isCharacterBox(r);if(t){var u=wt(t,n.havingStyle(a.sup()),n);h={elem:u,kern:Math.max(n.fontMetrics().bigOpSpacing1,n.fontMetrics().bigOpSpacing3-u.depth)}}if(r){var p=wt(r,n.havingStyle(a.sub()),n);s={elem:p,kern:Math.max(n.fontMetrics().bigOpSpacing2,n.fontMetrics().bigOpSpacing4-p.height)}}if(h&&s){var d=n.fontMetrics().bigOpSpacing5+s.elem.height+s.elem.depth+s.kern+e.depth+o;m=Ke.makeVList({positionType:"bottom",positionData:d,children:[{type:"kern",size:n.fontMetrics().bigOpSpacing5},{type:"elem",elem:s.elem,marginLeft:V(-i)},{type:"kern",size:s.kern},{type:"elem",elem:e},{type:"kern",size:h.kern},{type:"elem",elem:h.elem,marginLeft:V(i)},{type:"kern",size:n.fontMetrics().bigOpSpacing5}]},n)}else if(s){var f=e.height-o;m=Ke.makeVList({positionType:"top",positionData:f,children:[{type:"kern",size:n.fontMetrics().bigOpSpacing5},{type:"elem",elem:s.elem,marginLeft:V(-i)},{type:"kern",size:s.kern},{type:"elem",elem:e}]},n)}else{if(!h)return e;var g=e.depth+o;m=Ke.makeVList({positionType:"bottom",positionData:g,children:[{type:"elem",elem:e},{type:"kern",size:h.kern},{type:"elem",elem:h.elem,marginLeft:V(i)},{type:"kern",size:n.fontMetrics().bigOpSpacing5}]},n)}var v=[m];if(s&&0!==i&&!c){var b=Ke.makeSpan(["mspace"],[],n);b.style.marginRight=V(i),v.unshift(b)}return Ke.makeSpan(["mop","op-limits"],v,n)},un=["\\smallint"],pn=function(e,t){var r,n,a,i=!1;"supsub"===e.type?(r=e.sup,n=e.sub,a=Ut(e.base,"op"),i=!0):a=Ut(e,"op");var o,s=t.style,h=!1;if(s.size===x.DISPLAY.size&&a.symbol&&!l.contains(un,a.name)&&(h=!0),a.symbol){var 
m=h?"Size2-Regular":"Size1-Regular",c="";if("\\oiint"!==a.name&&"\\oiiint"!==a.name||(c=a.name.substr(1),a.name="oiint"===c?"\\iint":"\\iiint"),o=Ke.makeSymbol(a.name,m,"math",t,["mop","op-symbol",h?"large-op":"small-op"]),c.length>0){var u=o.italic,p=Ke.staticSvg(c+"Size"+(h?"2":"1"),t);o=Ke.makeVList({positionType:"individualShift",children:[{type:"elem",elem:o,shift:0},{type:"elem",elem:p,shift:h?.08:0}]},t),a.name="\\"+c,o.classes.unshift("mop"),o.italic=u}}else if(a.body){var d=ft(a.body,t,!0);1===d.length&&d[0]instanceof Z?(o=d[0]).classes[0]="mop":o=Ke.makeSpan(["mop"],d,t)}else{for(var f=[],g=1;g0){for(var s=a.body.map((function(e){var t=e.text;return"string"==typeof t?{type:"textord",mode:e.mode,text:t}:e})),l=ft(s,t.withFont("mathrm"),!0),h=0;h=0?s.setAttribute("height",V(a)):(s.setAttribute("height",V(a)),s.setAttribute("depth",V(-a))),s.setAttribute("voffset",V(a)),s}});var yn=["\\tiny","\\sixptsize","\\scriptsize","\\footnotesize","\\small","\\normalsize","\\large","\\Large","\\LARGE","\\huge","\\Huge"];ot({type:"sizing",names:yn,props:{numArgs:0,allowedInText:!0},handler:function(e,t){var r=e.breakOnTokenText,n=e.funcName,a=e.parser,i=a.parseExpression(!1,r);return{type:"sizing",mode:a.mode,size:yn.indexOf(n)+1,body:i}},htmlBuilder:function(e,t){var r=t.havingSize(e.size);return bn(e.body,r,t)},mathmlBuilder:function(e,t){var r=t.havingSize(e.size),n=Nt(e.body,r),a=new Tt.MathNode("mstyle",n);return a.setAttribute("mathsize",V(r.sizeMultiplier)),a}}),ot({type:"smash",names:["\\smash"],props:{numArgs:1,numOptionalArgs:1,allowedInText:!0},handler:function(e,t,r){var n=e.parser,a=!1,i=!1,o=r[0]&&Ut(r[0],"ordgroup");if(o)for(var s="",l=0;lr.height+r.depth+i&&(i=(i+c-r.height-r.depth)/2);var u=l.height-r.height-i-h;r.style.paddingLeft=V(m);var p=Ke.makeVList({positionType:"firstBaseline",children:[{type:"elem",elem:r,wrapperClasses:["svg-align"]},{type:"kern",size:-(r.height+u)},{type:"elem",elem:l},{type:"kern",size:h}]},t);if(e.index){var d=t.havingStyle(x.SCRIPTSCRIPT),f=wt(e.index,d,t),g=.6*(p.height-p.depth),v=Ke.makeVList({positionType:"shift",positionData:-g,children:[{type:"elem",elem:f}]},t),b=Ke.makeSpan(["root"],[v]);return Ke.makeSpan(["mord","sqrt"],[b,p],t)}return Ke.makeSpan(["mord","sqrt"],[p],t)},mathmlBuilder:function(e,t){var r=e.body,n=e.index;return n?new Tt.MathNode("mroot",[Rt(r,t),Rt(n,t)]):new Tt.MathNode("msqrt",[Rt(r,t)])}});var xn={display:x.DISPLAY,text:x.TEXT,script:x.SCRIPT,scriptscript:x.SCRIPTSCRIPT};ot({type:"styling",names:["\\displaystyle","\\textstyle","\\scriptstyle","\\scriptscriptstyle"],props:{numArgs:0,allowedInText:!0,primitive:!0},handler:function(e,t){var r=e.breakOnTokenText,n=e.funcName,a=e.parser,i=a.parseExpression(!0,r),o=n.slice(1,n.length-5);return{type:"styling",mode:a.mode,style:o,body:i}},htmlBuilder:function(e,t){var r=xn[e.style],n=t.havingStyle(r).withFont("");return bn(e.body,n,t)},mathmlBuilder:function(e,t){var r=xn[e.style],n=t.havingStyle(r),a=Nt(e.body,n),i=new Tt.MathNode("mstyle",a),o={display:["0","true"],text:["0","false"],script:["1","false"],scriptscript:["2","false"]}[e.style];return i.setAttribute("scriptlevel",o[0]),i.setAttribute("displaystyle",o[1]),i}});var wn=function(e,t){var r=e.base;return 
r?"op"===r.type?r.limits&&(t.style.size===x.DISPLAY.size||r.alwaysHandleSupSub)?pn:null:"operatorname"===r.type?r.alwaysHandleSupSub&&(t.style.size===x.DISPLAY.size||r.limits)?vn:null:"accent"===r.type?l.isCharacterBox(r.base)?Wt:null:"horizBrace"===r.type&&!e.sub===r.isOver?ln:null:null};st({type:"supsub",htmlBuilder:function(e,t){var r=wn(e,t);if(r)return r(e,t);var n,a,i,o=e.base,s=e.sup,h=e.sub,m=wt(o,t),c=t.fontMetrics(),u=0,p=0,d=o&&l.isCharacterBox(o);if(s){var f=t.havingStyle(t.style.sup());n=wt(s,f,t),d||(u=m.height-f.fontMetrics().supDrop*f.sizeMultiplier/t.sizeMultiplier)}if(h){var g=t.havingStyle(t.style.sub());a=wt(h,g,t),d||(p=m.depth+g.fontMetrics().subDrop*g.sizeMultiplier/t.sizeMultiplier)}i=t.style===x.DISPLAY?c.sup1:t.style.cramped?c.sup3:c.sup2;var v,b=t.sizeMultiplier,y=V(.5/c.ptPerEm/b),w=null;if(a){var k=e.base&&"op"===e.base.type&&e.base.name&&("\\oiint"===e.base.name||"\\oiiint"===e.base.name);(m instanceof Z||k)&&(w=V(-m.italic))}if(n&&a){u=Math.max(u,i,n.depth+.25*c.xHeight),p=Math.max(p,c.sub2);var S=4*c.defaultRuleThickness;if(u-n.depth-(a.height-p)0&&(u+=M,p-=M)}var z=[{type:"elem",elem:a,shift:p,marginRight:y,marginLeft:w},{type:"elem",elem:n,shift:-u,marginRight:y}];v=Ke.makeVList({positionType:"individualShift",children:z},t)}else if(a){p=Math.max(p,c.sub1,a.height-.8*c.xHeight);var A=[{type:"elem",elem:a,marginLeft:w,marginRight:y}];v=Ke.makeVList({positionType:"shift",positionData:p,children:A},t)}else{if(!n)throw new Error("supsub must have either sup or sub.");u=Math.max(u,i,n.depth+.25*c.xHeight),v=Ke.makeVList({positionType:"shift",positionData:-u,children:[{type:"elem",elem:n,marginRight:y}]},t)}var T=yt(m,"right")||"mord";return Ke.makeSpan([T],[m,Ke.makeSpan(["msupsub"],[v])],t)},mathmlBuilder:function(e,t){var r,n=!1;e.base&&"horizBrace"===e.base.type&&!!e.sup===e.base.isOver&&(n=!0,r=e.base.isOver),!e.base||"op"!==e.base.type&&"operatorname"!==e.base.type||(e.base.parentIsSupSub=!0);var a,i=[Rt(e.base,t)];if(e.sub&&i.push(Rt(e.sub,t)),e.sup&&i.push(Rt(e.sup,t)),n)a=r?"mover":"munder";else if(e.sub)if(e.sup){var o=e.base;a=o&&"op"===o.type&&o.limits&&t.style===x.DISPLAY||o&&"operatorname"===o.type&&o.alwaysHandleSupSub&&(t.style===x.DISPLAY||o.limits)?"munderover":"msubsup"}else{var s=e.base;a=s&&"op"===s.type&&s.limits&&(t.style===x.DISPLAY||s.alwaysHandleSupSub)||s&&"operatorname"===s.type&&s.alwaysHandleSupSub&&(s.limits||t.style===x.DISPLAY)?"munder":"msub"}else{var l=e.base;a=l&&"op"===l.type&&l.limits&&(t.style===x.DISPLAY||l.alwaysHandleSupSub)||l&&"operatorname"===l.type&&l.alwaysHandleSupSub&&(l.limits||t.style===x.DISPLAY)?"mover":"msup"}return new Tt.MathNode(a,i)}}),st({type:"atom",htmlBuilder:function(e,t){return Ke.mathsym(e.text,e.mode,t,["m"+e.family])},mathmlBuilder:function(e,t){var r=new Tt.MathNode("mo",[Bt(e.text,e.mode)]);if("bin"===e.family){var n=qt(e,t);"bold-italic"===n&&r.setAttribute("mathvariant",n)}else"punct"===e.family?r.setAttribute("separator","true"):"open"!==e.family&&"close"!==e.family||r.setAttribute("stretchy","false");return r}});var kn={mi:"italic",mn:"normal",mtext:"normal"};st({type:"mathord",htmlBuilder:function(e,t){return Ke.makeOrd(e,t,"mathord")},mathmlBuilder:function(e,t){var r=new Tt.MathNode("mi",[Bt(e.text,e.mode,t)]),n=qt(e,t)||"italic";return n!==kn[r.type]&&r.setAttribute("mathvariant",n),r}}),st({type:"textord",htmlBuilder:function(e,t){return Ke.makeOrd(e,t,"textord")},mathmlBuilder:function(e,t){var r,n=Bt(e.text,e.mode,t),a=qt(e,t)||"normal";return r="text"===e.mode?new 
Tt.MathNode("mtext",[n]):/[0-9]/.test(e.text)?new Tt.MathNode("mn",[n]):"\\prime"===e.text?new Tt.MathNode("mo",[n]):new Tt.MathNode("mi",[n]),a!==kn[r.type]&&r.setAttribute("mathvariant",a),r}});var Sn={"\\nobreak":"nobreak","\\allowbreak":"allowbreak"},Mn={" ":{},"\\ ":{},"~":{className:"nobreak"},"\\space":{},"\\nobreakspace":{className:"nobreak"}};st({type:"spacing",htmlBuilder:function(e,t){if(Mn.hasOwnProperty(e.text)){var r=Mn[e.text].className||"";if("text"===e.mode){var a=Ke.makeOrd(e,t,"textord");return a.classes.push(r),a}return Ke.makeSpan(["mspace",r],[Ke.mathsym(e.text,e.mode,t)],t)}if(Sn.hasOwnProperty(e.text))return Ke.makeSpan(["mspace",Sn[e.text]],[],t);throw new n('Unknown type of space "'+e.text+'"')},mathmlBuilder:function(e,t){if(!Mn.hasOwnProperty(e.text)){if(Sn.hasOwnProperty(e.text))return new Tt.MathNode("mspace");throw new n('Unknown type of space "'+e.text+'"')}return new Tt.MathNode("mtext",[new Tt.TextNode("\xa0")])}});var zn=function(){var e=new Tt.MathNode("mtd",[]);return e.setAttribute("width","50%"),e};st({type:"tag",mathmlBuilder:function(e,t){var r=new Tt.MathNode("mtable",[new Tt.MathNode("mtr",[zn(),new Tt.MathNode("mtd",[It(e.body,t)]),zn(),new Tt.MathNode("mtd",[It(e.tag,t)])])]);return r.setAttribute("width","100%"),r}});var An={"\\text":void 0,"\\textrm":"textrm","\\textsf":"textsf","\\texttt":"texttt","\\textnormal":"textrm"},Tn={"\\textbf":"textbf","\\textmd":"textmd"},Bn={"\\textit":"textit","\\textup":"textup"},Cn=function(e,t){var r=e.font;return r?An[r]?t.withTextFontFamily(An[r]):Tn[r]?t.withTextFontWeight(Tn[r]):t.withTextFontShape(Bn[r]):t};ot({type:"text",names:["\\text","\\textrm","\\textsf","\\texttt","\\textnormal","\\textbf","\\textmd","\\textit","\\textup"],props:{numArgs:1,argTypes:["text"],allowedInArgument:!0,allowedInText:!0},handler:function(e,t){var r=e.parser,n=e.funcName,a=t[0];return{type:"text",mode:r.mode,body:ht(a),font:n}},htmlBuilder:function(e,t){var r=Cn(e,t),n=ft(e.body,r,!0);return Ke.makeSpan(["mord","text"],n,r)},mathmlBuilder:function(e,t){var r=Cn(e,t);return It(e.body,r)}}),ot({type:"underline",names:["\\underline"],props:{numArgs:1,allowedInText:!0},handler:function(e,t){return{type:"underline",mode:e.parser.mode,body:t[0]}},htmlBuilder:function(e,t){var r=wt(e.body,t),n=Ke.makeLineSpan("underline-line",t),a=t.fontMetrics().defaultRuleThickness,i=Ke.makeVList({positionType:"top",positionData:r.height,children:[{type:"kern",size:a},{type:"elem",elem:n},{type:"kern",size:3*a},{type:"elem",elem:r}]},t);return Ke.makeSpan(["mord","underline"],[i],t)},mathmlBuilder:function(e,t){var r=new Tt.MathNode("mo",[new Tt.TextNode("\u203e")]);r.setAttribute("stretchy","true");var n=new Tt.MathNode("munder",[Rt(e.body,t),r]);return n.setAttribute("accentunder","true"),n}}),ot({type:"vcenter",names:["\\vcenter"],props:{numArgs:1,argTypes:["original"],allowedInText:!1},handler:function(e,t){return{type:"vcenter",mode:e.parser.mode,body:t[0]}},htmlBuilder:function(e,t){var r=wt(e.body,t),n=t.fontMetrics().axisHeight,a=.5*(r.height-n-(r.depth+n));return Ke.makeVList({positionType:"shift",positionData:a,children:[{type:"elem",elem:r}]},t)},mathmlBuilder:function(e,t){return new Tt.MathNode("mpadded",[Rt(e.body,t)],["vcenter"])}}),ot({type:"verb",names:["\\verb"],props:{numArgs:0,allowedInText:!0},handler:function(e,t,r){throw new n("\\verb ended by end of line instead of matching delimiter")},htmlBuilder:function(e,t){for(var r=qn(e),n=[],a=t.havingStyle(t.style.text()),i=0;i0;)this.endGroup()},t.has=function(e){return 
this.current.hasOwnProperty(e)||this.builtins.hasOwnProperty(e)},t.get=function(e){return this.current.hasOwnProperty(e)?this.current[e]:this.builtins[e]},t.set=function(e,t,r){if(void 0===r&&(r=!1),r){for(var n=0;n0&&(this.undefStack[this.undefStack.length-1][e]=t)}else{var a=this.undefStack[this.undefStack.length-1];a&&!a.hasOwnProperty(e)&&(a[e]=this.current[e])}null==t?delete this.current[e]:this.current[e]=t},e}(),Hn=Hr;Er("\\noexpand",(function(e){var t=e.popToken();return e.isExpandable(t.text)&&(t.noexpand=!0,t.treatAsRelax=!0),{tokens:[t],numArgs:0}})),Er("\\expandafter",(function(e){var t=e.popToken();return e.expandOnce(!0),{tokens:[t],numArgs:0}})),Er("\\@firstoftwo",(function(e){return{tokens:e.consumeArgs(2)[0],numArgs:0}})),Er("\\@secondoftwo",(function(e){return{tokens:e.consumeArgs(2)[1],numArgs:0}})),Er("\\@ifnextchar",(function(e){var t=e.consumeArgs(3);e.consumeSpaces();var r=e.future();return 1===t[0].length&&t[0][0].text===r.text?{tokens:t[1],numArgs:0}:{tokens:t[2],numArgs:0}})),Er("\\@ifstar","\\@ifnextchar *{\\@firstoftwo{#1}}"),Er("\\TextOrMath",(function(e){var t=e.consumeArgs(2);return"text"===e.mode?{tokens:t[0],numArgs:0}:{tokens:t[1],numArgs:0}}));var En={0:0,1:1,2:2,3:3,4:4,5:5,6:6,7:7,8:8,9:9,a:10,A:10,b:11,B:11,c:12,C:12,d:13,D:13,e:14,E:14,f:15,F:15};Er("\\char",(function(e){var t,r=e.popToken(),a="";if("'"===r.text)t=8,r=e.popToken();else if('"'===r.text)t=16,r=e.popToken();else if("`"===r.text)if("\\"===(r=e.popToken()).text[0])a=r.text.charCodeAt(1);else{if("EOF"===r.text)throw new n("\\char` missing argument");a=r.text.charCodeAt(0)}else t=10;if(t){if(null==(a=En[r.text])||a>=t)throw new n("Invalid base-"+t+" digit "+r.text);for(var i;null!=(i=En[e.future().text])&&i":"\\dotsb","-":"\\dotsb","*":"\\dotsb",":":"\\dotsb","\\DOTSB":"\\dotsb","\\coprod":"\\dotsb","\\bigvee":"\\dotsb","\\bigwedge":"\\dotsb","\\biguplus":"\\dotsb","\\bigcap":"\\dotsb","\\bigcup":"\\dotsb","\\prod":"\\dotsb","\\sum":"\\dotsb","\\bigotimes":"\\dotsb","\\bigoplus":"\\dotsb","\\bigodot":"\\dotsb","\\bigsqcup":"\\dotsb","\\And":"\\dotsb","\\longrightarrow":"\\dotsb","\\Longrightarrow":"\\dotsb","\\longleftarrow":"\\dotsb","\\Longleftarrow":"\\dotsb","\\longleftrightarrow":"\\dotsb","\\Longleftrightarrow":"\\dotsb","\\mapsto":"\\dotsb","\\longmapsto":"\\dotsb","\\hookrightarrow":"\\dotsb","\\doteq":"\\dotsb","\\mathbin":"\\dotsb","\\mathrel":"\\dotsb","\\relbar":"\\dotsb","\\Relbar":"\\dotsb","\\xrightarrow":"\\dotsb","\\xleftarrow":"\\dotsb","\\DOTSI":"\\dotsi","\\int":"\\dotsi","\\oint":"\\dotsi","\\iint":"\\dotsi","\\iiint":"\\dotsi","\\iiiint":"\\dotsi","\\idotsint":"\\dotsi","\\DOTSX":"\\dotsx"};Er("\\dots",(function(e){var t="\\dotso",r=e.expandAfterFuture().text;return r in Dn?t=Dn[r]:("\\not"===r.substr(0,4)||r in ae.math&&l.contains(["bin","rel"],ae.math[r].group))&&(t="\\dotsb"),t}));var Pn={")":!0,"]":!0,"\\rbrack":!0,"\\}":!0,"\\rbrace":!0,"\\rangle":!0,"\\rceil":!0,"\\rfloor":!0,"\\rgroup":!0,"\\rmoustache":!0,"\\right":!0,"\\bigr":!0,"\\biggr":!0,"\\Bigr":!0,"\\Biggr":!0,$:!0,";":!0,".":!0,",":!0};Er("\\dotso",(function(e){return e.future().text in Pn?"\\ldots\\,":"\\ldots"})),Er("\\dotsc",(function(e){var t=e.future().text;return t in Pn&&","!==t?"\\ldots\\,":"\\ldots"})),Er("\\cdots",(function(e){return e.future().text in 
Pn?"\\@cdots\\,":"\\@cdots"})),Er("\\dotsb","\\cdots"),Er("\\dotsm","\\cdots"),Er("\\dotsi","\\!\\cdots"),Er("\\dotsx","\\ldots\\,"),Er("\\DOTSI","\\relax"),Er("\\DOTSB","\\relax"),Er("\\DOTSX","\\relax"),Er("\\tmspace","\\TextOrMath{\\kern#1#3}{\\mskip#1#2}\\relax"),Er("\\,","\\tmspace+{3mu}{.1667em}"),Er("\\thinspace","\\,"),Er("\\>","\\mskip{4mu}"),Er("\\:","\\tmspace+{4mu}{.2222em}"),Er("\\medspace","\\:"),Er("\\;","\\tmspace+{5mu}{.2777em}"),Er("\\thickspace","\\;"),Er("\\!","\\tmspace-{3mu}{.1667em}"),Er("\\negthinspace","\\!"),Er("\\negmedspace","\\tmspace-{4mu}{.2222em}"),Er("\\negthickspace","\\tmspace-{5mu}{.277em}"),Er("\\enspace","\\kern.5em "),Er("\\enskip","\\hskip.5em\\relax"),Er("\\quad","\\hskip1em\\relax"),Er("\\qquad","\\hskip2em\\relax"),Er("\\tag","\\@ifstar\\tag@literal\\tag@paren"),Er("\\tag@paren","\\tag@literal{({#1})}"),Er("\\tag@literal",(function(e){if(e.macros.get("\\df@tag"))throw new n("Multiple \\tag");return"\\gdef\\df@tag{\\text{#1}}"})),Er("\\bmod","\\mathchoice{\\mskip1mu}{\\mskip1mu}{\\mskip5mu}{\\mskip5mu}\\mathbin{\\rm mod}\\mathchoice{\\mskip1mu}{\\mskip1mu}{\\mskip5mu}{\\mskip5mu}"),Er("\\pod","\\allowbreak\\mathchoice{\\mkern18mu}{\\mkern8mu}{\\mkern8mu}{\\mkern8mu}(#1)"),Er("\\pmod","\\pod{{\\rm mod}\\mkern6mu#1}"),Er("\\mod","\\allowbreak\\mathchoice{\\mkern18mu}{\\mkern12mu}{\\mkern12mu}{\\mkern12mu}{\\rm mod}\\,\\,#1"),Er("\\pmb","\\html@mathml{\\@binrel{#1}{\\mathrlap{#1}\\kern0.5px#1}}{\\mathbf{#1}}"),Er("\\newline","\\\\\\relax"),Er("\\TeX","\\textrm{\\html@mathml{T\\kern-.1667em\\raisebox{-.5ex}{E}\\kern-.125emX}{TeX}}");var Fn=V(T["Main-Regular"]["T".charCodeAt(0)][1]-.7*T["Main-Regular"]["A".charCodeAt(0)][1]);Er("\\LaTeX","\\textrm{\\html@mathml{L\\kern-.36em\\raisebox{"+Fn+"}{\\scriptstyle A}\\kern-.15em\\TeX}{LaTeX}}"),Er("\\KaTeX","\\textrm{\\html@mathml{K\\kern-.17em\\raisebox{"+Fn+"}{\\scriptstyle A}\\kern-.15em\\TeX}{KaTeX}}"),Er("\\hspace","\\@ifstar\\@hspacer\\@hspace"),Er("\\@hspace","\\hskip #1\\relax"),Er("\\@hspacer","\\rule{0pt}{0pt}\\hskip 
#1\\relax"),Er("\\ordinarycolon",":"),Er("\\vcentcolon","\\mathrel{\\mathop\\ordinarycolon}"),Er("\\dblcolon",'\\html@mathml{\\mathrel{\\vcentcolon\\mathrel{\\mkern-.9mu}\\vcentcolon}}{\\mathop{\\char"2237}}'),Er("\\coloneqq",'\\html@mathml{\\mathrel{\\vcentcolon\\mathrel{\\mkern-1.2mu}=}}{\\mathop{\\char"2254}}'),Er("\\Coloneqq",'\\html@mathml{\\mathrel{\\dblcolon\\mathrel{\\mkern-1.2mu}=}}{\\mathop{\\char"2237\\char"3d}}'),Er("\\coloneq",'\\html@mathml{\\mathrel{\\vcentcolon\\mathrel{\\mkern-1.2mu}\\mathrel{-}}}{\\mathop{\\char"3a\\char"2212}}'),Er("\\Coloneq",'\\html@mathml{\\mathrel{\\dblcolon\\mathrel{\\mkern-1.2mu}\\mathrel{-}}}{\\mathop{\\char"2237\\char"2212}}'),Er("\\eqqcolon",'\\html@mathml{\\mathrel{=\\mathrel{\\mkern-1.2mu}\\vcentcolon}}{\\mathop{\\char"2255}}'),Er("\\Eqqcolon",'\\html@mathml{\\mathrel{=\\mathrel{\\mkern-1.2mu}\\dblcolon}}{\\mathop{\\char"3d\\char"2237}}'),Er("\\eqcolon",'\\html@mathml{\\mathrel{\\mathrel{-}\\mathrel{\\mkern-1.2mu}\\vcentcolon}}{\\mathop{\\char"2239}}'),Er("\\Eqcolon",'\\html@mathml{\\mathrel{\\mathrel{-}\\mathrel{\\mkern-1.2mu}\\dblcolon}}{\\mathop{\\char"2212\\char"2237}}'),Er("\\colonapprox",'\\html@mathml{\\mathrel{\\vcentcolon\\mathrel{\\mkern-1.2mu}\\approx}}{\\mathop{\\char"3a\\char"2248}}'),Er("\\Colonapprox",'\\html@mathml{\\mathrel{\\dblcolon\\mathrel{\\mkern-1.2mu}\\approx}}{\\mathop{\\char"2237\\char"2248}}'),Er("\\colonsim",'\\html@mathml{\\mathrel{\\vcentcolon\\mathrel{\\mkern-1.2mu}\\sim}}{\\mathop{\\char"3a\\char"223c}}'),Er("\\Colonsim",'\\html@mathml{\\mathrel{\\dblcolon\\mathrel{\\mkern-1.2mu}\\sim}}{\\mathop{\\char"2237\\char"223c}}'),Er("\u2237","\\dblcolon"),Er("\u2239","\\eqcolon"),Er("\u2254","\\coloneqq"),Er("\u2255","\\eqqcolon"),Er("\u2a74","\\Coloneqq"),Er("\\ratio","\\vcentcolon"),Er("\\coloncolon","\\dblcolon"),Er("\\colonequals","\\coloneqq"),Er("\\coloncolonequals","\\Coloneqq"),Er("\\equalscolon","\\eqqcolon"),Er("\\equalscoloncolon","\\Eqqcolon"),Er("\\colonminus","\\coloneq"),Er("\\coloncolonminus","\\Coloneq"),Er("\\minuscolon","\\eqcolon"),Er("\\minuscoloncolon","\\Eqcolon"),Er("\\coloncolonapprox","\\Colonapprox"),Er("\\coloncolonsim","\\Colonsim"),Er("\\simcolon","\\mathrel{\\sim\\mathrel{\\mkern-1.2mu}\\vcentcolon}"),Er("\\simcoloncolon","\\mathrel{\\sim\\mathrel{\\mkern-1.2mu}\\dblcolon}"),Er("\\approxcolon","\\mathrel{\\approx\\mathrel{\\mkern-1.2mu}\\vcentcolon}"),Er("\\approxcoloncolon","\\mathrel{\\approx\\mathrel{\\mkern-1.2mu}\\dblcolon}"),Er("\\notni","\\html@mathml{\\not\\ni}{\\mathrel{\\char`\u220c}}"),Er("\\limsup","\\DOTSB\\operatorname*{lim\\,sup}"),Er("\\liminf","\\DOTSB\\operatorname*{lim\\,inf}"),Er("\\injlim","\\DOTSB\\operatorname*{inj\\,lim}"),Er("\\projlim","\\DOTSB\\operatorname*{proj\\,lim}"),Er("\\varlimsup","\\DOTSB\\operatorname*{\\overline{lim}}"),Er("\\varliminf","\\DOTSB\\operatorname*{\\underline{lim}}"),Er("\\varinjlim","\\DOTSB\\operatorname*{\\underrightarrow{lim}}"),Er("\\varprojlim","\\DOTSB\\operatorname*{\\underleftarrow{lim}}"),Er("\\gvertneqq","\\html@mathml{\\@gvertneqq}{\u2269}"),Er("\\lvertneqq","\\html@mathml{\\@lvertneqq}{\u2268}"),Er("\\ngeqq","\\html@mathml{\\@ngeqq}{\u2271}"),Er("\\ngeqslant","\\html@mathml{\\@ngeqslant}{\u2271}"),Er("\\nleqq","\\html@mathml{\\@nleqq}{\u2270}"),Er("\\nleqslant","\\html@mathml{\\@nleqslant}{\u2270}"),Er("\\nshortmid","\\html@mathml{\\@nshortmid}{\u2224}"),Er("\\nshortparallel","\\html@mathml{\\@nshortparallel}{\u2226}"),Er("\\nsubseteqq","\\html@mathml{\\@nsubseteqq}{\u2288}"),Er("\\nsupseteqq","\\html@mathml{\\@nsupseteqq}{\u22
89}"),Er("\\varsubsetneq","\\html@mathml{\\@varsubsetneq}{\u228a}"),Er("\\varsubsetneqq","\\html@mathml{\\@varsubsetneqq}{\u2acb}"),Er("\\varsupsetneq","\\html@mathml{\\@varsupsetneq}{\u228b}"),Er("\\varsupsetneqq","\\html@mathml{\\@varsupsetneqq}{\u2acc}"),Er("\\imath","\\html@mathml{\\@imath}{\u0131}"),Er("\\jmath","\\html@mathml{\\@jmath}{\u0237}"),Er("\\llbracket","\\html@mathml{\\mathopen{[\\mkern-3.2mu[}}{\\mathopen{\\char`\u27e6}}"),Er("\\rrbracket","\\html@mathml{\\mathclose{]\\mkern-3.2mu]}}{\\mathclose{\\char`\u27e7}}"),Er("\u27e6","\\llbracket"),Er("\u27e7","\\rrbracket"),Er("\\lBrace","\\html@mathml{\\mathopen{\\{\\mkern-3.2mu[}}{\\mathopen{\\char`\u2983}}"),Er("\\rBrace","\\html@mathml{\\mathclose{]\\mkern-3.2mu\\}}}{\\mathclose{\\char`\u2984}}"),Er("\u2983","\\lBrace"),Er("\u2984","\\rBrace"),Er("\\minuso","\\mathbin{\\html@mathml{{\\mathrlap{\\mathchoice{\\kern{0.145em}}{\\kern{0.145em}}{\\kern{0.1015em}}{\\kern{0.0725em}}\\circ}{-}}}{\\char`\u29b5}}"),Er("\u29b5","\\minuso"),Er("\\darr","\\downarrow"),Er("\\dArr","\\Downarrow"),Er("\\Darr","\\Downarrow"),Er("\\lang","\\langle"),Er("\\rang","\\rangle"),Er("\\uarr","\\uparrow"),Er("\\uArr","\\Uparrow"),Er("\\Uarr","\\Uparrow"),Er("\\N","\\mathbb{N}"),Er("\\R","\\mathbb{R}"),Er("\\Z","\\mathbb{Z}"),Er("\\alef","\\aleph"),Er("\\alefsym","\\aleph"),Er("\\Alpha","\\mathrm{A}"),Er("\\Beta","\\mathrm{B}"),Er("\\bull","\\bullet"),Er("\\Chi","\\mathrm{X}"),Er("\\clubs","\\clubsuit"),Er("\\cnums","\\mathbb{C}"),Er("\\Complex","\\mathbb{C}"),Er("\\Dagger","\\ddagger"),Er("\\diamonds","\\diamondsuit"),Er("\\empty","\\emptyset"),Er("\\Epsilon","\\mathrm{E}"),Er("\\Eta","\\mathrm{H}"),Er("\\exist","\\exists"),Er("\\harr","\\leftrightarrow"),Er("\\hArr","\\Leftrightarrow"),Er("\\Harr","\\Leftrightarrow"),Er("\\hearts","\\heartsuit"),Er("\\image","\\Im"),Er("\\infin","\\infty"),Er("\\Iota","\\mathrm{I}"),Er("\\isin","\\in"),Er("\\Kappa","\\mathrm{K}"),Er("\\larr","\\leftarrow"),Er("\\lArr","\\Leftarrow"),Er("\\Larr","\\Leftarrow"),Er("\\lrarr","\\leftrightarrow"),Er("\\lrArr","\\Leftrightarrow"),Er("\\Lrarr","\\Leftrightarrow"),Er("\\Mu","\\mathrm{M}"),Er("\\natnums","\\mathbb{N}"),Er("\\Nu","\\mathrm{N}"),Er("\\Omicron","\\mathrm{O}"),Er("\\plusmn","\\pm"),Er("\\rarr","\\rightarrow"),Er("\\rArr","\\Rightarrow"),Er("\\Rarr","\\Rightarrow"),Er("\\real","\\Re"),Er("\\reals","\\mathbb{R}"),Er("\\Reals","\\mathbb{R}"),Er("\\Rho","\\mathrm{P}"),Er("\\sdot","\\cdot"),Er("\\sect","\\S"),Er("\\spades","\\spadesuit"),Er("\\sub","\\subset"),Er("\\sube","\\subseteq"),Er("\\supe","\\supseteq"),Er("\\Tau","\\mathrm{T}"),Er("\\thetasym","\\vartheta"),Er("\\weierp","\\wp"),Er("\\Zeta","\\mathrm{Z}"),Er("\\argmin","\\DOTSB\\operatorname*{arg\\,min}"),Er("\\argmax","\\DOTSB\\operatorname*{arg\\,max}"),Er("\\plim","\\DOTSB\\mathop{\\operatorname{plim}}\\limits"),Er("\\bra","\\mathinner{\\langle{#1}|}"),Er("\\ket","\\mathinner{|{#1}\\rangle}"),Er("\\braket","\\mathinner{\\langle{#1}\\rangle}"),Er("\\Bra","\\left\\langle#1\\right|"),Er("\\Ket","\\left|#1\\right\\rangle"),Er("\\angln","{\\angl 
n}"),Er("\\blue","\\textcolor{##6495ed}{#1}"),Er("\\orange","\\textcolor{##ffa500}{#1}"),Er("\\pink","\\textcolor{##ff00af}{#1}"),Er("\\red","\\textcolor{##df0030}{#1}"),Er("\\green","\\textcolor{##28ae7b}{#1}"),Er("\\gray","\\textcolor{gray}{#1}"),Er("\\purple","\\textcolor{##9d38bd}{#1}"),Er("\\blueA","\\textcolor{##ccfaff}{#1}"),Er("\\blueB","\\textcolor{##80f6ff}{#1}"),Er("\\blueC","\\textcolor{##63d9ea}{#1}"),Er("\\blueD","\\textcolor{##11accd}{#1}"),Er("\\blueE","\\textcolor{##0c7f99}{#1}"),Er("\\tealA","\\textcolor{##94fff5}{#1}"),Er("\\tealB","\\textcolor{##26edd5}{#1}"),Er("\\tealC","\\textcolor{##01d1c1}{#1}"),Er("\\tealD","\\textcolor{##01a995}{#1}"),Er("\\tealE","\\textcolor{##208170}{#1}"),Er("\\greenA","\\textcolor{##b6ffb0}{#1}"),Er("\\greenB","\\textcolor{##8af281}{#1}"),Er("\\greenC","\\textcolor{##74cf70}{#1}"),Er("\\greenD","\\textcolor{##1fab54}{#1}"),Er("\\greenE","\\textcolor{##0d923f}{#1}"),Er("\\goldA","\\textcolor{##ffd0a9}{#1}"),Er("\\goldB","\\textcolor{##ffbb71}{#1}"),Er("\\goldC","\\textcolor{##ff9c39}{#1}"),Er("\\goldD","\\textcolor{##e07d10}{#1}"),Er("\\goldE","\\textcolor{##a75a05}{#1}"),Er("\\redA","\\textcolor{##fca9a9}{#1}"),Er("\\redB","\\textcolor{##ff8482}{#1}"),Er("\\redC","\\textcolor{##f9685d}{#1}"),Er("\\redD","\\textcolor{##e84d39}{#1}"),Er("\\redE","\\textcolor{##bc2612}{#1}"),Er("\\maroonA","\\textcolor{##ffbde0}{#1}"),Er("\\maroonB","\\textcolor{##ff92c6}{#1}"),Er("\\maroonC","\\textcolor{##ed5fa6}{#1}"),Er("\\maroonD","\\textcolor{##ca337c}{#1}"),Er("\\maroonE","\\textcolor{##9e034e}{#1}"),Er("\\purpleA","\\textcolor{##ddd7ff}{#1}"),Er("\\purpleB","\\textcolor{##c6b9fc}{#1}"),Er("\\purpleC","\\textcolor{##aa87ff}{#1}"),Er("\\purpleD","\\textcolor{##7854ab}{#1}"),Er("\\purpleE","\\textcolor{##543b78}{#1}"),Er("\\mintA","\\textcolor{##f5f9e8}{#1}"),Er("\\mintB","\\textcolor{##edf2df}{#1}"),Er("\\mintC","\\textcolor{##e0e5cc}{#1}"),Er("\\grayA","\\textcolor{##f6f7f7}{#1}"),Er("\\grayB","\\textcolor{##f0f1f2}{#1}"),Er("\\grayC","\\textcolor{##e3e5e6}{#1}"),Er("\\grayD","\\textcolor{##d6d8da}{#1}"),Er("\\grayE","\\textcolor{##babec2}{#1}"),Er("\\grayF","\\textcolor{##888d93}{#1}"),Er("\\grayG","\\textcolor{##626569}{#1}"),Er("\\grayH","\\textcolor{##3b3e40}{#1}"),Er("\\grayI","\\textcolor{##21242c}{#1}"),Er("\\kaBlue","\\textcolor{##314453}{#1}"),Er("\\kaGreen","\\textcolor{##71B307}{#1}");var Vn={"^":!0,_:!0,"\\limits":!0,"\\nolimits":!0},Gn=function(){function e(e,t,r){this.settings=void 0,this.expansionCount=void 0,this.lexer=void 0,this.macros=void 0,this.stack=void 0,this.mode=void 0,this.settings=t,this.expansionCount=0,this.feed(e),this.macros=new On(Hn,t.macros),this.mode=r,this.stack=[]}var t=e.prototype;return t.feed=function(e){this.lexer=new Rn(e,this.settings)},t.switchMode=function(e){this.mode=e},t.beginGroup=function(){this.macros.beginGroup()},t.endGroup=function(){this.macros.endGroup()},t.endGroups=function(){this.macros.endGroups()},t.future=function(){return 0===this.stack.length&&this.pushToken(this.lexer.lex()),this.stack[this.stack.length-1]},t.popToken=function(){return this.future(),this.stack.pop()},t.pushToken=function(e){this.stack.push(e)},t.pushTokens=function(e){var t;(t=this.stack).push.apply(t,e)},t.scanArgument=function(e){var t,r,n;if(e){if(this.consumeSpaces(),"["!==this.future().text)return null;t=this.popToken();var a=this.consumeArg(["]"]);n=a.tokens,r=a.end}else{var i=this.consumeArg();n=i.tokens,t=i.start,r=i.end}return this.pushToken(new 
Dr("EOF",r.loc)),this.pushTokens(n),t.range(r,"")},t.consumeSpaces=function(){for(;;){if(" "!==this.future().text)break;this.stack.pop()}},t.consumeArg=function(e){var t=[],r=e&&e.length>0;r||this.consumeSpaces();var a,i=this.future(),o=0,s=0;do{if(a=this.popToken(),t.push(a),"{"===a.text)++o;else if("}"===a.text){if(-1===--o)throw new n("Extra }",a)}else if("EOF"===a.text)throw new n("Unexpected end of input in a macro argument, expected '"+(e&&r?e[s]:"}")+"'",a);if(e&&r)if((0===o||1===o&&"{"===e[s])&&a.text===e[s]){if(++s===e.length){t.splice(-s,s);break}}else s=0}while(0!==o||r);return"{"===i.text&&"}"===t[t.length-1].text&&(t.pop(),t.shift()),t.reverse(),{tokens:t,start:i,end:a}},t.consumeArgs=function(e,t){if(t){if(t.length!==e+1)throw new n("The length of delimiters doesn't match the number of args!");for(var r=t[0],a=0;athis.settings.maxExpand)throw new n("Too many expansions: infinite loop or need to increase maxExpand setting");var i=a.tokens,o=this.consumeArgs(a.numArgs,a.delimiters);if(a.numArgs)for(var s=(i=i.slice()).length-1;s>=0;--s){var l=i[s];if("#"===l.text){if(0===s)throw new n("Incomplete placeholder at end of macro body",l);if("#"===(l=i[--s]).text)i.splice(s+1,1);else{if(!/^[1-9]$/.test(l.text))throw new n("Not a valid argument number",l);var h;(h=i).splice.apply(h,[s,2].concat(o[+l.text-1]))}}}return this.pushTokens(i),i},t.expandAfterFuture=function(){return this.expandOnce(),this.future()},t.expandNextToken=function(){for(;;){var e=this.expandOnce();if(e instanceof Dr)return e.treatAsRelax&&(e.text="\\relax"),this.stack.pop()}throw new Error},t.expandMacro=function(e){return this.macros.has(e)?this.expandTokens([new Dr(e)]):void 0},t.expandTokens=function(e){var t=[],r=this.stack.length;for(this.pushTokens(e);this.stack.length>r;){var n=this.expandOnce(!0);n instanceof Dr&&(n.treatAsRelax&&(n.noexpand=!1,n.treatAsRelax=!1),t.push(this.stack.pop()))}return t},t.expandMacroAsText=function(e){var t=this.expandMacro(e);return t?t.map((function(e){return e.text})).join(""):t},t._getExpansion=function(e){var t=this.macros.get(e);if(null==t)return t;if(1===e.length){var r=this.lexer.catcodes[e];if(null!=r&&13!==r)return}var n="function"==typeof t?t(this):t;if("string"==typeof n){var a=0;if(-1!==n.indexOf("#"))for(var i=n.replace(/##/g,"");-1!==i.indexOf("#"+(a+1));)++a;for(var o=new Rn(n,this.settings),s=[],l=o.lex();"EOF"!==l.text;)s.push(l),l=o.lex();return s.reverse(),{tokens:s,numArgs:a}}return n},t.isDefined=function(e){return this.macros.has(e)||Nn.hasOwnProperty(e)||ae.math.hasOwnProperty(e)||ae.text.hasOwnProperty(e)||Vn.hasOwnProperty(e)},t.isExpandable=function(e){var t=this.macros.get(e);return null!=t?"string"==typeof t||"function"==typeof 
t||!t.unexpandable:Nn.hasOwnProperty(e)&&!Nn[e].primitive},e}(),Un={"\u0301":{text:"\\'",math:"\\acute"},"\u0300":{text:"\\`",math:"\\grave"},"\u0308":{text:'\\"',math:"\\ddot"},"\u0303":{text:"\\~",math:"\\tilde"},"\u0304":{text:"\\=",math:"\\bar"},"\u0306":{text:"\\u",math:"\\breve"},"\u030c":{text:"\\v",math:"\\check"},"\u0302":{text:"\\^",math:"\\hat"},"\u0307":{text:"\\.",math:"\\dot"},"\u030a":{text:"\\r",math:"\\mathring"},"\u030b":{text:"\\H"},"\u0327":{text:"\\c"}},Yn={"\xe1":"a\u0301","\xe0":"a\u0300","\xe4":"a\u0308","\u01df":"a\u0308\u0304","\xe3":"a\u0303","\u0101":"a\u0304","\u0103":"a\u0306","\u1eaf":"a\u0306\u0301","\u1eb1":"a\u0306\u0300","\u1eb5":"a\u0306\u0303","\u01ce":"a\u030c","\xe2":"a\u0302","\u1ea5":"a\u0302\u0301","\u1ea7":"a\u0302\u0300","\u1eab":"a\u0302\u0303","\u0227":"a\u0307","\u01e1":"a\u0307\u0304","\xe5":"a\u030a","\u01fb":"a\u030a\u0301","\u1e03":"b\u0307","\u0107":"c\u0301","\u1e09":"c\u0327\u0301","\u010d":"c\u030c","\u0109":"c\u0302","\u010b":"c\u0307","\xe7":"c\u0327","\u010f":"d\u030c","\u1e0b":"d\u0307","\u1e11":"d\u0327","\xe9":"e\u0301","\xe8":"e\u0300","\xeb":"e\u0308","\u1ebd":"e\u0303","\u0113":"e\u0304","\u1e17":"e\u0304\u0301","\u1e15":"e\u0304\u0300","\u0115":"e\u0306","\u1e1d":"e\u0327\u0306","\u011b":"e\u030c","\xea":"e\u0302","\u1ebf":"e\u0302\u0301","\u1ec1":"e\u0302\u0300","\u1ec5":"e\u0302\u0303","\u0117":"e\u0307","\u0229":"e\u0327","\u1e1f":"f\u0307","\u01f5":"g\u0301","\u1e21":"g\u0304","\u011f":"g\u0306","\u01e7":"g\u030c","\u011d":"g\u0302","\u0121":"g\u0307","\u0123":"g\u0327","\u1e27":"h\u0308","\u021f":"h\u030c","\u0125":"h\u0302","\u1e23":"h\u0307","\u1e29":"h\u0327","\xed":"i\u0301","\xec":"i\u0300","\xef":"i\u0308","\u1e2f":"i\u0308\u0301","\u0129":"i\u0303","\u012b":"i\u0304","\u012d":"i\u0306","\u01d0":"i\u030c","\xee":"i\u0302","\u01f0":"j\u030c","\u0135":"j\u0302","\u1e31":"k\u0301","\u01e9":"k\u030c","\u0137":"k\u0327","\u013a":"l\u0301","\u013e":"l\u030c","\u013c":"l\u0327","\u1e3f":"m\u0301","\u1e41":"m\u0307","\u0144":"n\u0301","\u01f9":"n\u0300","\xf1":"n\u0303","\u0148":"n\u030c","\u1e45":"n\u0307","\u0146":"n\u0327","\xf3":"o\u0301","\xf2":"o\u0300","\xf6":"o\u0308","\u022b":"o\u0308\u0304","\xf5":"o\u0303","\u1e4d":"o\u0303\u0301","\u1e4f":"o\u0303\u0308","\u022d":"o\u0303\u0304","\u014d":"o\u0304","\u1e53":"o\u0304\u0301","\u1e51":"o\u0304\u0300","\u014f":"o\u0306","\u01d2":"o\u030c","\xf4":"o\u0302","\u1ed1":"o\u0302\u0301","\u1ed3":"o\u0302\u0300","\u1ed7":"o\u0302\u0303","\u022f":"o\u0307","\u0231":"o\u0307\u0304","\u0151":"o\u030b","\u1e55":"p\u0301","\u1e57":"p\u0307","\u0155":"r\u0301","\u0159":"r\u030c","\u1e59":"r\u0307","\u0157":"r\u0327","\u015b":"s\u0301","\u1e65":"s\u0301\u0307","\u0161":"s\u030c","\u1e67":"s\u030c\u0307","\u015d":"s\u0302","\u1e61":"s\u0307","\u015f":"s\u0327","\u1e97":"t\u0308","\u0165":"t\u030c","\u1e6b":"t\u0307","\u0163":"t\u0327","\xfa":"u\u0301","\xf9":"u\u0300","\xfc":"u\u0308","\u01d8":"u\u0308\u0301","\u01dc":"u\u0308\u0300","\u01d6":"u\u0308\u0304","\u01da":"u\u0308\u030c","\u0169":"u\u0303","\u1e79":"u\u0303\u0301","\u016b":"u\u0304","\u1e7b":"u\u0304\u0308","\u016d":"u\u0306","\u01d4":"u\u030c","\xfb":"u\u0302","\u016f":"u\u030a","\u0171":"u\u030b","\u1e7d":"v\u0303","\u1e83":"w\u0301","\u1e81":"w\u0300","\u1e85":"w\u0308","\u0175":"w\u0302","\u1e87":"w\u0307","\u1e98":"w\u030a","\u1e8d":"x\u0308","\u1e8b":"x\u0307","\xfd":"y\u0301","\u1ef3":"y\u0300","\xff":"y\u0308","\u1ef9":"y\u0303","\u0233":"y\u0304","\u0177":"y\u0302","\u1e8f":"y\u0307","\u1e99":"y\u030a","\u017a":
"z\u0301","\u017e":"z\u030c","\u1e91":"z\u0302","\u017c":"z\u0307","\xc1":"A\u0301","\xc0":"A\u0300","\xc4":"A\u0308","\u01de":"A\u0308\u0304","\xc3":"A\u0303","\u0100":"A\u0304","\u0102":"A\u0306","\u1eae":"A\u0306\u0301","\u1eb0":"A\u0306\u0300","\u1eb4":"A\u0306\u0303","\u01cd":"A\u030c","\xc2":"A\u0302","\u1ea4":"A\u0302\u0301","\u1ea6":"A\u0302\u0300","\u1eaa":"A\u0302\u0303","\u0226":"A\u0307","\u01e0":"A\u0307\u0304","\xc5":"A\u030a","\u01fa":"A\u030a\u0301","\u1e02":"B\u0307","\u0106":"C\u0301","\u1e08":"C\u0327\u0301","\u010c":"C\u030c","\u0108":"C\u0302","\u010a":"C\u0307","\xc7":"C\u0327","\u010e":"D\u030c","\u1e0a":"D\u0307","\u1e10":"D\u0327","\xc9":"E\u0301","\xc8":"E\u0300","\xcb":"E\u0308","\u1ebc":"E\u0303","\u0112":"E\u0304","\u1e16":"E\u0304\u0301","\u1e14":"E\u0304\u0300","\u0114":"E\u0306","\u1e1c":"E\u0327\u0306","\u011a":"E\u030c","\xca":"E\u0302","\u1ebe":"E\u0302\u0301","\u1ec0":"E\u0302\u0300","\u1ec4":"E\u0302\u0303","\u0116":"E\u0307","\u0228":"E\u0327","\u1e1e":"F\u0307","\u01f4":"G\u0301","\u1e20":"G\u0304","\u011e":"G\u0306","\u01e6":"G\u030c","\u011c":"G\u0302","\u0120":"G\u0307","\u0122":"G\u0327","\u1e26":"H\u0308","\u021e":"H\u030c","\u0124":"H\u0302","\u1e22":"H\u0307","\u1e28":"H\u0327","\xcd":"I\u0301","\xcc":"I\u0300","\xcf":"I\u0308","\u1e2e":"I\u0308\u0301","\u0128":"I\u0303","\u012a":"I\u0304","\u012c":"I\u0306","\u01cf":"I\u030c","\xce":"I\u0302","\u0130":"I\u0307","\u0134":"J\u0302","\u1e30":"K\u0301","\u01e8":"K\u030c","\u0136":"K\u0327","\u0139":"L\u0301","\u013d":"L\u030c","\u013b":"L\u0327","\u1e3e":"M\u0301","\u1e40":"M\u0307","\u0143":"N\u0301","\u01f8":"N\u0300","\xd1":"N\u0303","\u0147":"N\u030c","\u1e44":"N\u0307","\u0145":"N\u0327","\xd3":"O\u0301","\xd2":"O\u0300","\xd6":"O\u0308","\u022a":"O\u0308\u0304","\xd5":"O\u0303","\u1e4c":"O\u0303\u0301","\u1e4e":"O\u0303\u0308","\u022c":"O\u0303\u0304","\u014c":"O\u0304","\u1e52":"O\u0304\u0301","\u1e50":"O\u0304\u0300","\u014e":"O\u0306","\u01d1":"O\u030c","\xd4":"O\u0302","\u1ed0":"O\u0302\u0301","\u1ed2":"O\u0302\u0300","\u1ed6":"O\u0302\u0303","\u022e":"O\u0307","\u0230":"O\u0307\u0304","\u0150":"O\u030b","\u1e54":"P\u0301","\u1e56":"P\u0307","\u0154":"R\u0301","\u0158":"R\u030c","\u1e58":"R\u0307","\u0156":"R\u0327","\u015a":"S\u0301","\u1e64":"S\u0301\u0307","\u0160":"S\u030c","\u1e66":"S\u030c\u0307","\u015c":"S\u0302","\u1e60":"S\u0307","\u015e":"S\u0327","\u0164":"T\u030c","\u1e6a":"T\u0307","\u0162":"T\u0327","\xda":"U\u0301","\xd9":"U\u0300","\xdc":"U\u0308","\u01d7":"U\u0308\u0301","\u01db":"U\u0308\u0300","\u01d5":"U\u0308\u0304","\u01d9":"U\u0308\u030c","\u0168":"U\u0303","\u1e78":"U\u0303\u0301","\u016a":"U\u0304","\u1e7a":"U\u0304\u0308","\u016c":"U\u0306","\u01d3":"U\u030c","\xdb":"U\u0302","\u016e":"U\u030a","\u0170":"U\u030b","\u1e7c":"V\u0303","\u1e82":"W\u0301","\u1e80":"W\u0300","\u1e84":"W\u0308","\u0174":"W\u0302","\u1e86":"W\u0307","\u1e8c":"X\u0308","\u1e8a":"X\u0307","\xdd":"Y\u0301","\u1ef2":"Y\u0300","\u0178":"Y\u0308","\u1ef8":"Y\u0303","\u0232":"Y\u0304","\u0176":"Y\u0302","\u1e8e":"Y\u0307","\u0179":"Z\u0301","\u017d":"Z\u030c","\u1e90":"Z\u0302","\u017b":"Z\u0307","\u03ac":"\u03b1\u0301","\u1f70":"\u03b1\u0300","\u1fb1":"\u03b1\u0304","\u1fb0":"\u03b1\u0306","\u03ad":"\u03b5\u0301","\u1f72":"\u03b5\u0300","\u03ae":"\u03b7\u0301","\u1f74":"\u03b7\u0300","\u03af":"\u03b9\u0301","\u1f76":"\u03b9\u0300","\u03ca":"\u03b9\u0308","\u0390":"\u03b9\u0308\u0301","\u1fd2":"\u03b9\u0308\u0300","\u1fd1":"\u03b9\u0304","\u1fd0":"\u03b9\u0306","\u03cc":"\u03bf\u0301","\u1f78"
:"\u03bf\u0300","\u03cd":"\u03c5\u0301","\u1f7a":"\u03c5\u0300","\u03cb":"\u03c5\u0308","\u03b0":"\u03c5\u0308\u0301","\u1fe2":"\u03c5\u0308\u0300","\u1fe1":"\u03c5\u0304","\u1fe0":"\u03c5\u0306","\u03ce":"\u03c9\u0301","\u1f7c":"\u03c9\u0300","\u038e":"\u03a5\u0301","\u1fea":"\u03a5\u0300","\u03ab":"\u03a5\u0308","\u1fe9":"\u03a5\u0304","\u1fe8":"\u03a5\u0306","\u038f":"\u03a9\u0301","\u1ffa":"\u03a9\u0300"},Xn=function(){function e(e,t){this.mode=void 0,this.gullet=void 0,this.settings=void 0,this.leftrightDepth=void 0,this.nextToken=void 0,this.mode="math",this.gullet=new Gn(e,t,this.mode),this.settings=t,this.leftrightDepth=0}var t=e.prototype;return t.expect=function(e,t){if(void 0===t&&(t=!0),this.fetch().text!==e)throw new n("Expected '"+e+"', got '"+this.fetch().text+"'",this.fetch());t&&this.consume()},t.consume=function(){this.nextToken=null},t.fetch=function(){return null==this.nextToken&&(this.nextToken=this.gullet.expandNextToken()),this.nextToken},t.switchMode=function(e){this.mode=e,this.gullet.switchMode(e)},t.parse=function(){this.settings.globalGroup||this.gullet.beginGroup(),this.settings.colorIsTextColor&&this.gullet.macros.set("\\color","\\textcolor");try{var e=this.parseExpression(!1);return this.expect("EOF"),this.settings.globalGroup||this.gullet.endGroup(),e}finally{this.gullet.endGroups()}},t.subparse=function(e){var t=this.nextToken;this.consume(),this.gullet.pushToken(new Dr("}")),this.gullet.pushTokens(e);var r=this.parseExpression(!1);return this.expect("}"),this.nextToken=t,r},t.parseExpression=function(t,r){for(var n=[];;){"math"===this.mode&&this.consumeSpaces();var a=this.fetch();if(-1!==e.endOfExpression.indexOf(a.text))break;if(r&&a.text===r)break;if(t&&Nn[a.text]&&Nn[a.text].infix)break;var i=this.parseAtom(r);if(!i)break;"internal"!==i.type&&n.push(i)}return"text"===this.mode&&this.formLigatures(n),this.handleInfixNodes(n)},t.handleInfixNodes=function(e){for(var t,r=-1,a=0;a=0&&this.settings.reportNonstrict("unicodeTextInMathMode",'Latin-1/Unicode text character "'+t[0]+'" used in math mode',e);var s,l=ae[this.mode][t].group,h=Lr.range(e);if(te.hasOwnProperty(l)){var m=l;s={type:"atom",mode:this.mode,family:m,loc:h,text:t}}else s={type:l,mode:this.mode,loc:h,text:t};i=s}else{if(!(t.charCodeAt(0)>=128))return null;this.settings.strict&&(S(t.charCodeAt(0))?"math"===this.mode&&this.settings.reportNonstrict("unicodeTextInMathMode",'Unicode text character "'+t[0]+'" used in math mode',e):this.settings.reportNonstrict("unknownSymbol",'Unrecognized Unicode character "'+t[0]+'" ('+t.charCodeAt(0)+")",e)),i={type:"textord",mode:"text",loc:Lr.range(e),text:t}}if(this.consume(),o)for(var c=0;c=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},m=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},v=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0 mjx-mid"]={"margin-top":this.em(-p/2),"margin-bottom":this.em(-p/2)}}l&&(h["border-top-width"]=this.em0(l-.03)),u&&(h["border-bottom-width"]=this.em0(u-.03),t[f+"mjx-stretchy-v"+e+" > mjx-end"]={"margin-top":this.em(-u)}),Object.keys(h).length&&(t[f+"mjx-stretchy-v"+e+" > 
mjx-ext"]=h)},s.prototype.addDelimiterVPart=function(t,e,r,n,i){if(!i)return 0;var o=this.getDelimiterData(i),a=(r-o[2])/2,s={content:this.charContent(i)};return"ext"!==n?s.padding=this.padding(o,a):a&&(s["padding-left"]=this.em0(a)),t[this.cssRoot+"mjx-stretchy-v"+e+" mjx-"+n+" mjx-c::before"]=s,o[0]+o[1]},s.prototype.addDelimiterHStyles=function(t,e,r){var n=v(r.stretch,4),i=n[0],o=n[1],a=n[2],s=n[3];this.addDelimiterHPart(t,e,"beg",i),this.addDelimiterHPart(t,e,"ext",o,!(i||a)),this.addDelimiterHPart(t,e,"end",a),s&&(this.addDelimiterHPart(t,e,"mid",s),t[this.cssRoot+"mjx-stretchy-h"+e+" > mjx-ext"]={width:"50%"})},s.prototype.addDelimiterHPart=function(t,e,r,n,i){if(void 0===i&&(i=!1),!n)return 0;var o=this.getDelimiterData(n),a=o[3],s={content:a&&a.c?'"'+a.c+'"':this.charContent(n)};"ext"===r&&!i||(s.padding=this.padding(o,0,-o[2])),t[this.cssRoot+"mjx-stretchy-h"+e+" mjx-"+r+" mjx-c::before"]=s},s.prototype.addCharStyles=function(t,e,r,n,i){var o=v(n,4),a=(o[0],o[1],o[2]),s=o[3];if(!this.options.adaptiveCSS||s.used){var c={},l="mjx-c"+this.charSelector(r),u=this.cssRoot;c.padding=this.padding(n,0,s.ic||0);var h=s.c?'"'+s.c+'"':this.charContent(r);i.get(r)!==h&&(i.has(r)||s.c?t[u+e+" "+l+"::before"]={content:h}:(t[u+l+"::before"]={content:h},i.set(r,h))),void 0!==s.f&&(c["font-family"]="MJXZERO, MJXTEX"+(s.f?"-"+s.f:""));var f=(e?e+" ":"")+l;if(t[u+f]=c,s.ic){var p=v([u+"mjx-","[noIC]"+f+":last-child"],2),d=p[0],m=p[1];t[d+"mi"+m]=t[d+"mo"+m]={"padding-right":this.em(a)}}}},s.prototype.getDelimiterData=function(t){return this.getChar("-smallop",t)},s.charOptions=function(t,e){return h.charOptions.call(this,t,e)},s.prototype.em=function(t){return o.em(t)},s.prototype.em0=function(t){return o.em(Math.max(0,t))},s.prototype.padding=function(t,e,r){var n=v(t,3),i=n[0],o=n[1];return void 0===e&&(e=0),void 0===r&&(r=0),[i,n[2]+r,o,e].map(this.em0).join(" ")},s.prototype.charContent=function(t){return'"'+(32<=t&&t<=126&&34!==t&&39!==t&&92!==t?String.fromCharCode(t):"\\"+t.toString(16).toUpperCase())+'"'},s.prototype.charSelector=function(t){return".mjx-c"+t.toString(16).toUpperCase()},s.OPTIONS={fontURL:"js/output/chtml/fonts/tex-woff-v2"},s.defaultVariantClasses={},s.defaultStyles={"mjx-c::before":{display:"inline-block",width:0}},s.defaultFonts={"@font-face /* 0 */":{"font-family":"MJXZERO",src:'url("%%URL%%/MathJax_Zero.woff") format("woff")'}},s);function s(t){var e,r;void 0===t&&(t=null);var n=h.call(this)||this;n.cssRoot="";var i=n.constructor;n.options=u.userOptions(u.defaultOptions({},i.OPTIONS),t);try{for(var o=y(Object.keys(i.defaultVariantClasses)),a=o.next();!a.done;a=o.next()){var s=a.value;n.variant[s].classes=i.defaultVariantClasses[s]}}catch(t){e={error:t}}finally{try{a&&!a.done&&(r=o.return)&&r.call(o)}finally{if(e)throw e.error}}return n}r.CHTMLFontData=a,r.AddCSS=function(t,e){var r,n;try{for(var i=y(Object.keys(e)),o=i.next();!o.done;o=i.next()){var a=o.value,s=parseInt(a);Object.assign(c.FontData.charOptions(t,s),e[s])}}catch(t){r={error:t}}finally{try{o&&!o.done&&(n=i.return)&&n.call(i)}finally{if(r)throw r.error}}return t}},function(t,u,e){"use strict";var n,r,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),h=this&&this.__values||function(t){var e="function"==typeof 
Symbol&&Symbol.iterator,r=e&&t[e],n=0;if(r)return r.call(t);if(t&&"number"==typeof t.length)return{next:function(){return t&&n>=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},f=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},r=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};function s(t,e){var r,n;try{for(var i=l(Object.keys(e)),o=i.next();!o.done;o=i.next()){var a=o.value;"__esModule"!==a&&("object"==typeof t[a]&&"object"==typeof e[a]?s(t[a],e[a]):null!==e[a]&&void 0!==e[a]&&(t[a]=e[a]))}}catch(t){r={error:t}}finally{try{o&&!o.done&&(n=i.return)&&n.call(i)}finally{if(r)throw r.error}}return t}Object.defineProperty(e,"__esModule",{value:!0}),e.combineConfig=s,e.combineDefaults=function t(e,r,n){var i,o;e[r]||(e[r]={}),e=e[r];try{for(var a=l(Object.keys(n)),s=a.next();!s.done;s=a.next()){var c=s.value;"object"==typeof e[c]&&"object"==typeof n[c]?t(e,c,n[c]):null==e[c]&&null!=n[c]&&(e[c]=n[c])}}catch(t){i={error:t}}finally{try{s&&!s.done&&(o=a.return)&&o.call(a)}finally{if(i)throw i.error}}return e},e.combineWithMathJax=function(t){return s(e.MathJax,t)},void 0===t.MathJax&&(t.MathJax={}),t.MathJax.version||(t.MathJax={version:"3.0.0",_:{},config:t.MathJax}),e.MathJax=t.MathJax}).call(this,r(28))},function(t,e,r){"use strict";var l=this&&this.__values||function(t){var e="function"==typeof Symbol&&Symbol.iterator,r=e&&t[e],n=0;if(r)return r.call(t);if(t&&"number"==typeof t.length)return{next:function(){return t&&n>=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},n=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var n,i,a,o,s,c,h,f=r(0),p=r(6),d=r(21),m=r(4),y=r(12);function v(t,e){void 0===e&&(e=!1);var r=t.match(e?h:c);return r?[r[1].replace(/,/,"."),r[4],r[0].length]:[null,null,0]}function b(t,e,r){"{"!==e&&"}"!==e||(e="\\"+e);var n="{\\bigg"+r+" "+e+"}",i="{\\big"+r+" "+e+"}";return new d.default("\\mathchoice"+n+i+i+i,{},t).mml()}function g(t,e,r){e=e.replace(/^\s+/,y.entities.nbsp).replace(/\s+$/,y.entities.nbsp);var n=t.create("text",e);return t.create("node","mtext",[],r,n)}function M(t,e,r){if(r.match(/^[a-z]/i)&&e.match(/(^|[^\\])(\\\\)*\\[a-z]+$/i)&&(e+=" "),e.length+r.length>t.configuration.options.maxBuffer)throw new m.default("MaxBufferSize","MathJax internal buffer size exceeded; is there a recursive macro call?");return e+r}function O(t,e){for(;0e.length)throw new m.default("IllegalMacroParam","Illegal macro parameter reference");i=M(t,M(t,i,n),e[parseInt(a,10)-1]),n=""}else n+=a}return M(t,i,n)},i.addArgs=M,i.checkEqnEnv=function(t){if(t.stack.global.eqnenv)throw new m.default("ErroneousNestingEq","Erroneous nesting of equation 
structures");t.stack.global.eqnenv=!0},i.MmlFilterAttribute=function(t,e,r){return r},i.getFontDef=function(t){var e=t.stack.env.font;return e?{mathvariant:e}:{}},i.keyvalOptions=function(t,e,r){var n,i;void 0===e&&(e=null),void 0===r&&(r=!1);var o=function(t){for(var e,r,n,i,o,a={},s=t;s;)e=l(x(s,["=",","]),3),i=e[0],n=e[1],s=e[2],"="===n?(r=l(x(s,[","]),3),o=r[0],n=r[1],s=r[2],o="false"===o||"true"===o?JSON.parse(o):o,a[i]=o):i&&(a[i]=!0);return a}(t);if(e)try{for(var a=u(Object.keys(o)),s=a.next();!s.done;s=a.next()){var c=s.value;if(!e.hasOwnProperty(c)){if(r)throw new m.default("InvalidOption","Invalid optional argument: %1",c);delete o[c]}}}catch(t){n={error:t}}finally{try{s&&!s.done&&(i=a.return)&&i.call(a)}finally{if(n)throw n.error}}return o},e.default=n},function(t,e,r){"use strict";var n,i,o,l=this&&this.__values||function(t){var e="function"==typeof Symbol&&Symbol.iterator,r=e&&t[e],n=0;if(r)return r.call(t);if(t&&"number"==typeof t.length)return{next:function(){return t&&n>=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},u=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},c=this&&this.__spread||function(){for(var t=[],e=0;e=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var b,n,i,o=r(33),a=r(8),L=r(3),s=r(9),g=r(8),M=r(25),c=(l.create=function(t,e){return void 0===e&&(e={}),new l(t,e.handler||{},e.fallback||{},e.items||{},e.tags||{},e.options||{},e.nodes||{},e.preprocessors||[],e.postprocessors||[],[e.init,e.priority],[e.config,e.configPriority])},l.empty=function(){return l.create("empty")},l.extension=function(){return new s.MacroMap(a.ExtensionMaps.NEW_MACRO,{},{}),new s.DelimiterMap(a.ExtensionMaps.NEW_DELIMITER,o.default.delimiter,{}),new s.CommandMap(a.ExtensionMaps.NEW_COMMAND,{},{}),new s.EnvironmentMap(a.ExtensionMaps.NEW_ENVIRONMENT,o.default.environment,{},{}),l.create("extension",{handler:{character:[],delimiter:[a.ExtensionMaps.NEW_DELIMITER],macro:[a.ExtensionMaps.NEW_DELIMITER,a.ExtensionMaps.NEW_COMMAND,a.ExtensionMaps.NEW_MACRO],environment:[a.ExtensionMaps.NEW_ENVIRONMENT]}})},l.prototype.init=function(t){this.initMethod.execute(t)},l.prototype.config=function(t,e){var r,n,i,o;this.configMethod.execute(t,e);try{for(var a=I(this.preprocessors),s=a.next();!s.done;s=a.next()){var c=s.value;"function"==typeof c?e.preFilters.add(c):e.preFilters.add(c[0],c[1])}}catch(t){r={error:t}}finally{try{s&&!s.done&&(n=a.return)&&n.call(a)}finally{if(r)throw r.error}}try{for(var l=I(this.postprocessors),u=l.next();!u.done;u=l.next()){var h=u.value;"function"==typeof h?e.postFilters.add(h):e.postFilters.add(h[0],h[1])}}catch(t){i={error:t}}finally{try{u&&!u.done&&(o=l.return)&&o.call(l)}finally{if(i)throw i.error}}},l.prototype.append=function(t){var e,r,n,i,o,a,s,c,l,u,h,f,p=Object.keys(t.handler);try{for(var d=I(p),m=d.next();!m.done;m=d.next()){var y=m.value;try{for(var v=(n=void 0,I(t.handler[y])),b=v.next();!b.done;b=v.next()){var g=b.value;this.handler[y].unshift(g)}}catch(t){n={error:t}}finally{try{b&&!b.done&&(i=v.return)&&i.call(v)}finally{if(n)throw 
n.error}}}}catch(t){e={error:t}}finally{try{m&&!m.done&&(r=d.return)&&r.call(d)}finally{if(e)throw e.error}}Object.assign(this.fallback,t.fallback),Object.assign(this.items,t.items),Object.assign(this.tags,t.tags),L.defaultOptions(this.options,t.options),Object.assign(this.nodes,t.nodes);try{for(var M=I(t.preprocessors),O=M.next();!O.done;O=M.next()){var x=O.value;this.preprocessors.push(x)}}catch(t){o={error:t}}finally{try{O&&!O.done&&(a=M.return)&&a.call(M)}finally{if(o)throw o.error}}try{for(var S=I(t.postprocessors),E=S.next();!E.done;E=S.next()){var C=E.value;this.postprocessors.push(C)}}catch(t){s={error:t}}finally{try{E&&!E.done&&(c=S.return)&&c.call(S)}finally{if(s)throw s.error}}try{for(var _=I(t.initMethod),T=_.next();!T.done;T=_.next()){var w=T.value;this.initMethod.add(w.item,w.priority)}}catch(t){l={error:t}}finally{try{T&&!T.done&&(u=_.return)&&u.call(_)}finally{if(l)throw l.error}}try{for(var A=I(t.configMethod),k=A.next();!k.done;k=A.next())w=k.value,this.configMethod.add(w.item,w.priority)}catch(t){h={error:t}}finally{try{k&&!k.done&&(f=A.return)&&f.call(A)}finally{if(h)throw h.error}}},l.prototype.register=function(t,e,r){var n,i,o,a,s,c;void 0===r&&(r={}),this.append(t),t.init(this);var l=e.parseOptions;l.handlers=new g.SubHandlers(this),l.nodeFactory.setCreators(t.nodes);try{for(var u=I(Object.keys(t.items)),h=u.next();!h.done;h=u.next()){var f=h.value;l.itemFactory.setNodeClass(f,t.items[f])}}catch(t){n={error:t}}finally{try{h&&!h.done&&(i=u.return)&&i.call(u)}finally{if(n)throw n.error}}L.defaultOptions(l.options,t.options),L.userOptions(l.options,r),t.config(this,e);try{for(var p=I(t.preprocessors),d=p.next();!d.done;d=p.next()){var m=d.value;Array.isArray(m)?e.preFilters.add(m[0],m[1]):e.preFilters.add(m)}}catch(t){o={error:t}}finally{try{d&&!d.done&&(a=p.return)&&a.call(p)}finally{if(o)throw o.error}}try{for(var y=I(t.postprocessors),v=y.next();!v.done;v=y.next()){var b=v.value;Array.isArray(b)?e.postFilters.add(b[0],b[1]):e.postFilters.add(b)}}catch(t){s={error:t}}finally{try{v&&!v.done&&(c=y.return)&&c.call(y)}finally{if(s)throw s.error}}},l);function l(t,e,r,n,i,o,a,s,c,l,u){void 0===e&&(e={}),void 0===r&&(r={}),void 0===n&&(n={}),void 0===i&&(i={}),void 0===o&&(o={}),void 0===a&&(a={}),void 0===s&&(s=[]),void 0===c&&(c=[]);var h=v(l,2),f=h[0],p=h[1],d=v(u,2),m=d[0],y=d[1];this.name=t,this.handler=e,this.fallback=r,this.items=n,this.tags=i,this.options=o,this.nodes=a,this.preprocessors=s,this.postprocessors=c,this.initMethod=new M.FunctionList,this.configMethod=new M.FunctionList,f&&this.initMethod.add(f,p||0),m&&this.configMethod.add(m,y||p||0),this.handler=Object.assign({character:[],delimiter:[],macro:[],environment:[]},e),b.set(t,this)}e.Configuration=c,n=b=e.ConfigurationHandler||(e.ConfigurationHandler={}),i=new Map,n.set=function(t,e){i.set(t,e)},n.get=function(t){return i.get(t)},n.keys=function(){return i.keys()}},function(t,n,e){"use strict";Object.defineProperty(n,"__esModule",{value:!0});var 
i=e(69),o=e(103);n.options={loadMissingEntities:!0},n.entities={ApplyFunction:"\u2061",Backslash:"\u2216",Because:"\u2235",Breve:"\u02d8",Cap:"\u22d2",CenterDot:"\xb7",CircleDot:"\u2299",CircleMinus:"\u2296",CirclePlus:"\u2295",CircleTimes:"\u2297",Congruent:"\u2261",ContourIntegral:"\u222e",Coproduct:"\u2210",Cross:"\u2a2f",Cup:"\u22d3",CupCap:"\u224d",Dagger:"\u2021",Del:"\u2207",Delta:"\u0394",Diamond:"\u22c4",DifferentialD:"\u2146",DotEqual:"\u2250",DoubleDot:"\xa8",DoubleRightTee:"\u22a8",DoubleVerticalBar:"\u2225",DownArrow:"\u2193",DownLeftVector:"\u21bd",DownRightVector:"\u21c1",DownTee:"\u22a4",Downarrow:"\u21d3",Element:"\u2208",EqualTilde:"\u2242",Equilibrium:"\u21cc",Exists:"\u2203",ExponentialE:"\u2147",FilledVerySmallSquare:"\u25aa",ForAll:"\u2200",Gamma:"\u0393",Gg:"\u22d9",GreaterEqual:"\u2265",GreaterEqualLess:"\u22db",GreaterFullEqual:"\u2267",GreaterLess:"\u2277",GreaterSlantEqual:"\u2a7e",GreaterTilde:"\u2273",Hacek:"\u02c7",Hat:"^",HumpDownHump:"\u224e",HumpEqual:"\u224f",Im:"\u2111",ImaginaryI:"\u2148",Integral:"\u222b",Intersection:"\u22c2",InvisibleComma:"\u2063",InvisibleTimes:"\u2062",Lambda:"\u039b",Larr:"\u219e",LeftAngleBracket:"\u27e8",LeftArrow:"\u2190",LeftArrowRightArrow:"\u21c6",LeftCeiling:"\u2308",LeftDownVector:"\u21c3",LeftFloor:"\u230a",LeftRightArrow:"\u2194",LeftTee:"\u22a3",LeftTriangle:"\u22b2",LeftTriangleEqual:"\u22b4",LeftUpVector:"\u21bf",LeftVector:"\u21bc",Leftarrow:"\u21d0",Leftrightarrow:"\u21d4",LessEqualGreater:"\u22da",LessFullEqual:"\u2266",LessGreater:"\u2276",LessSlantEqual:"\u2a7d",LessTilde:"\u2272",Ll:"\u22d8",Lleftarrow:"\u21da",LongLeftArrow:"\u27f5",LongLeftRightArrow:"\u27f7",LongRightArrow:"\u27f6",Longleftarrow:"\u27f8",Longleftrightarrow:"\u27fa",Longrightarrow:"\u27f9",Lsh:"\u21b0",MinusPlus:"\u2213",NestedGreaterGreater:"\u226b",NestedLessLess:"\u226a",NotDoubleVerticalBar:"\u2226",NotElement:"\u2209",NotEqual:"\u2260",NotExists:"\u2204",NotGreater:"\u226f",NotGreaterEqual:"\u2271",NotLeftTriangle:"\u22ea",NotLeftTriangleEqual:"\u22ec",NotLess:"\u226e",NotLessEqual:"\u2270",NotPrecedes:"\u2280",NotPrecedesSlantEqual:"\u22e0",NotRightTriangle:"\u22eb",NotRightTriangleEqual:"\u22ed",NotSubsetEqual:"\u2288",NotSucceeds:"\u2281",NotSucceedsSlantEqual:"\u22e1",NotSupersetEqual:"\u2289",NotTilde:"\u2241",NotVerticalBar:"\u2224",Omega:"\u03a9",OverBar:"\u203e",OverBrace:"\u23de",PartialD:"\u2202",Phi:"\u03a6",Pi:"\u03a0",PlusMinus:"\xb1",Precedes:"\u227a",PrecedesEqual:"\u2aaf",PrecedesSlantEqual:"\u227c",PrecedesTilde:"\u227e",Product:"\u220f",Proportional:"\u221d",Psi:"\u03a8",Rarr:"\u21a0",Re:"\u211c",ReverseEquilibrium:"\u21cb",RightAngleBracket:"\u27e9",RightArrow:"\u2192",RightArrowLeftArrow:"\u21c4",RightCeiling:"\u2309",RightDownVector:"\u21c2",RightFloor:"\u230b",RightTee:"\u22a2",RightTeeArrow:"\u21a6",RightTriangle:"\u22b3",RightTriangleEqual:"\u22b5",RightUpVector:"\u21be",RightVector:"\u21c0",Rightarrow:"\u21d2",Rrightarrow:"\u21db",Rsh:"\u21b1",Sigma:"\u03a3",SmallCircle:"\u2218",Sqrt:"\u221a",Square:"\u25a1",SquareIntersection:"\u2293",SquareSubset:"\u228f",SquareSubsetEqual:"\u2291",SquareSuperset:"\u2290",SquareSupersetEqual:"\u2292",SquareUnion:"\u2294",Star:"\u22c6",Subset:"\u22d0",SubsetEqual:"\u2286",Succeeds:"\u227b",SucceedsEqual:"\u2ab0",SucceedsSlantEqual:"\u227d",SucceedsTilde:"\u227f",SuchThat:"\u220b",Sum:"\u2211",Superset:"\u2283",SupersetEqual:"\u2287",Supset:"\u22d1",Therefore:"\u2234",Theta:"\u0398",Tilde:"\u223c",TildeEqual:"\u2243",TildeFullEqual:"\u2245",TildeTilde:"\u2248",UnderBar:"_",UnderBra
ce:"\u23df",Union:"\u22c3",UnionPlus:"\u228e",UpArrow:"\u2191",UpDownArrow:"\u2195",UpTee:"\u22a5",Uparrow:"\u21d1",Updownarrow:"\u21d5",Upsilon:"\u03a5",Vdash:"\u22a9",Vee:"\u22c1",VerticalBar:"\u2223",VerticalTilde:"\u2240",Vvdash:"\u22aa",Wedge:"\u22c0",Xi:"\u039e",amp:"&",acute:"\xb4",aleph:"\u2135",alpha:"\u03b1",amalg:"\u2a3f",and:"\u2227",ang:"\u2220",angmsd:"\u2221",angsph:"\u2222",ape:"\u224a",backprime:"\u2035",backsim:"\u223d",backsimeq:"\u22cd",beta:"\u03b2",beth:"\u2136",between:"\u226c",bigcirc:"\u25ef",bigodot:"\u2a00",bigoplus:"\u2a01",bigotimes:"\u2a02",bigsqcup:"\u2a06",bigstar:"\u2605",bigtriangledown:"\u25bd",bigtriangleup:"\u25b3",biguplus:"\u2a04",blacklozenge:"\u29eb",blacktriangle:"\u25b4",blacktriangledown:"\u25be",blacktriangleleft:"\u25c2",bowtie:"\u22c8",boxdl:"\u2510",boxdr:"\u250c",boxminus:"\u229f",boxplus:"\u229e",boxtimes:"\u22a0",boxul:"\u2518",boxur:"\u2514",bsol:"\\",bull:"\u2022",cap:"\u2229",check:"\u2713",chi:"\u03c7",circ:"\u02c6",circeq:"\u2257",circlearrowleft:"\u21ba",circlearrowright:"\u21bb",circledR:"\xae",circledS:"\u24c8",circledast:"\u229b",circledcirc:"\u229a",circleddash:"\u229d",clubs:"\u2663",colon:":",comp:"\u2201",ctdot:"\u22ef",cuepr:"\u22de",cuesc:"\u22df",cularr:"\u21b6",cup:"\u222a",curarr:"\u21b7",curlyvee:"\u22ce",curlywedge:"\u22cf",dagger:"\u2020",daleth:"\u2138",ddarr:"\u21ca",deg:"\xb0",delta:"\u03b4",digamma:"\u03dd",div:"\xf7",divideontimes:"\u22c7",dot:"\u02d9",doteqdot:"\u2251",dotplus:"\u2214",dotsquare:"\u22a1",dtdot:"\u22f1",ecir:"\u2256",efDot:"\u2252",egs:"\u2a96",ell:"\u2113",els:"\u2a95",empty:"\u2205",epsi:"\u03b5",epsiv:"\u03f5",erDot:"\u2253",eta:"\u03b7",eth:"\xf0",flat:"\u266d",fork:"\u22d4",frown:"\u2322",gEl:"\u2a8c",gamma:"\u03b3",gap:"\u2a86",gimel:"\u2137",gnE:"\u2269",gnap:"\u2a8a",gne:"\u2a88",gnsim:"\u22e7",gt:">",gtdot:"\u22d7",harrw:"\u21ad",hbar:"\u210f",hellip:"\u2026",hookleftarrow:"\u21a9",hookrightarrow:"\u21aa",imath:"\u0131",infin:"\u221e",intcal:"\u22ba",iota:"\u03b9",jmath:"\u0237",kappa:"\u03ba",kappav:"\u03f0",lEg:"\u2a8b",lambda:"\u03bb",lap:"\u2a85",larrlp:"\u21ab",larrtl:"\u21a2",lbrace:"{",lbrack:"[",le:"\u2264",leftleftarrows:"\u21c7",leftthreetimes:"\u22cb",lessdot:"\u22d6",lmoust:"\u23b0",lnE:"\u2268",lnap:"\u2a89",lne:"\u2a87",lnsim:"\u22e6",longmapsto:"\u27fc",looparrowright:"\u21ac",lowast:"\u2217",loz:"\u25ca",lt:"<",ltimes:"\u22c9",ltri:"\u25c3",macr:"\xaf",malt:"\u2720",mho:"\u2127",mu:"\u03bc",multimap:"\u22b8",nLeftarrow:"\u21cd",nLeftrightarrow:"\u21ce",nRightarrow:"\u21cf",nVDash:"\u22af",nVdash:"\u22ae",natur:"\u266e",nearr:"\u2197",nharr:"\u21ae",nlarr:"\u219a",not:"\xac",nrarr:"\u219b",nu:"\u03bd",nvDash:"\u22ad",nvdash:"\u22ac",nwarr:"\u2196",omega:"\u03c9",omicron:"\u03bf",or:"\u2228",osol:"\u2298",period:".",phi:"\u03c6",phiv:"\u03d5",pi:"\u03c0",piv:"\u03d6",prap:"\u2ab7",precnapprox:"\u2ab9",precneqq:"\u2ab5",precnsim:"\u22e8",prime:"\u2032",psi:"\u03c8",quot:'"',rarrtl:"\u21a3",rbrace:"}",rbrack:"]",rho:"\u03c1",rhov:"\u03f1",rightrightarrows:"\u21c9",rightthreetimes:"\u22cc",ring:"\u02da",rmoust:"\u23b1",rtimes:"\u22ca",rtri:"\u25b9",scap:"\u2ab8",scnE:"\u2ab6",scnap:"\u2aba",scnsim:"\u22e9",sdot:"\u22c5",searr:"\u2198",sect:"\xa7",sharp:"\u266f",sigma:"\u03c3",sigmav:"\u03c2",simne:"\u2246",smile:"\u2323",spades:"\u2660",sub:"\u2282",subE:"\u2ac5",subnE:"\u2acb",subne:"\u228a",supE:"\u2ac6",supnE:"\u2acc",supne:"\u228b",swarr:"\u2199",tau:"\u03c4",theta:"\u03b8",thetav:"\u03d1",tilde:"\u02dc",times:"\xd7",triangle:"\u25b5",triangleq:"\u225c",upsi:"\u03c5",upuparr
ows:"\u21c8",veebar:"\u22bb",vellip:"\u22ee",weierp:"\u2118",xi:"\u03be",yen:"\xa5",zeta:"\u03b6",zigrarr:"\u21dd"};var a={};function r(t,e){if("#"===e.charAt(0))return s(e.slice(1));if(n.entities[e])return n.entities[e];if(n.options.loadMissingEntities){var r=e.match(/^[a-zA-Z](fr|scr|opf)$/)?RegExp.$1:e.charAt(0).toLowerCase();a[r]||(a[r]=!0,i.retryAfter(o.asyncLoad("./util/entities/"+r+".js")))}return t}function s(t){var e="x"===t.charAt(0)?parseInt(t.slice(1),16):parseInt(t);if(e<65536)return String.fromCharCode(e);var r=55296+((e-=65536)>>10),n=56320+(1023&e);return String.fromCharCode(r,n)}n.add=function(t,e){Object.assign(n.entities,t),a[e]=!0},n.remove=function(t){delete n.entities[t]},n.translate=function(t){return t.replace(/&([a-z][a-z0-9]*|#(?:[0-9]+|x[0-9a-f]+));/gi,r)},n.numeric=s},function(t,o,e){"use strict";Object.defineProperty(o,"__esModule",{value:!0}),o.protoItem=function(t,e,r,n,i,o,a){return void 0===a&&(a=null),{open:t,math:e,close:r,n:n,start:{n:i},end:{n:o},display:a}};var r=(n.prototype.render=function(t){t.renderActions.renderMath(this,t)},n.prototype.rerender=function(t,e){void 0===e&&(e=o.STATE.RERENDER),this.state()>=e&&this.state(e-1),t.renderActions.renderMath(this,t,e)},n.prototype.convert=function(t,e){void 0===e&&(e=o.STATE.LAST),t.renderActions.renderConvert(this,t,e)},n.prototype.compile=function(t){this.state()=o.STATE.INSERTED&&this.removeFromDocument(e),t=o.STATE.TYPESET&&(this.bbox={},this.outputData={}),t=o.STATE.COMPILED&&(this.inputData={}),this._state=t),this._state},n.prototype.reset=function(t){void 0===t&&(t=!1),this.state(o.STATE.UNPROCESSED)},n);function n(t,e,r,n,i){void 0===r&&(r=!0),void 0===n&&(n={i:0,n:0,delim:""}),void 0===i&&(i={i:0,n:0,delim:""}),this.root=null,this.typesetRoot=null,this._state=o.STATE.UNPROCESSED,this.metrics={},this.bbox={},this.inputData={},this.outputData={},this.math=t,this.inputJax=e,this.display=r,this.start=n,this.end=i,this.root=null,this.typesetRoot=null,this.metrics={},this.bbox={},this.inputData={},this.outputData={}}o.AbstractMathItem=r,o.STATE={UNPROCESSED:0,FINDMATH:10,COMPILED:20,CONVERT:100,METRICS:110,RERENDER:125,TYPESET:150,INSERTED:200,RESET:500,LAST:1e4},o.newState=function(t,e){if(t in o.STATE)throw Error("State "+t+" already exists");o.STATE[t]=e}},function(t,s,e){"use strict";Object.defineProperty(s,"__esModule",{value:!0}),s.BIGDIMEN=1e6,s.UNITS={px:1,pt:96/72,pc:8,in:96,cm:96/2.54,mm:96/25.4},s.RELUNITS={em:1,ex:.431,mu:1/18},s.MATHSPACE={veryverythinmathspace:1/18,verythinmathspace:2/18,thinmathspace:3/18,mediummathspace:4/18,thickmathspace:5/18,verythickmathspace:6/18,veryverythickmathspace:7/18,negativeveryverythinmathspace:-1/18,negativeverythinmathspace:-2/18,negativethinmathspace:-3/18,negativemediummathspace:-4/18,negativethickmathspace:-5/18,negativeverythickmathspace:-6/18,negativeveryverythickmathspace:-7/18,thin:.04,medium:.06,thick:.1,normal:1,big:2,small:1/Math.sqrt(2),infinity:s.BIGDIMEN},s.length2em=function(t,e,r,n){if(void 0===e&&(e=0),void 0===r&&(r=1),void 0===n&&(n=16),"string"!=typeof t&&(t=String(t)),""===t||null==t)return e;if(s.MATHSPACE[t])return s.MATHSPACE[t];var i=t.match(/^\s*([-+]?(?:\.\d+|\d+(?:\.\d*)?))?(pt|em|ex|mu|px|pc|in|mm|cm|%)?/);if(!i)return e;var o=parseFloat(i[1]||"1"),a=i[2];return s.UNITS.hasOwnProperty(a)?o*s.UNITS[a]/n/r:s.RELUNITS.hasOwnProperty(a)?o*s.RELUNITS[a]:"%"===a?o/100*e:o*e},s.percent=function(t){return(100*t).toFixed(1).replace(/\.?0+$/,"")+"%"},s.em=function(t){return 
Math.abs(t)<.001?"0":t.toFixed(3).replace(/\.?0+$/,"")+"em"},s.emRounded=function(t,e){return void 0===e&&(e=16),t=(Math.round(t*e)+.05)/e,Math.abs(t)<.001?"0em":t.toFixed(3).replace(/\.?0+$/,"")+"em"},s.px=function(t,e,r){return void 0===e&&(e=-s.BIGDIMEN),void 0===r&&(r=16),t*=r,e&&tthis.w&&(this.w=i),o>this.h&&(this.h=o),a>this.d&&(this.d=a)},o.prototype.append=function(t){var e=t.rscale;this.w+=e*(t.w+t.L+t.R),e*t.h>this.h&&(this.h=e*t.h),e*t.d>this.d&&(this.d=e*t.d)},o.prototype.updateFrom=function(t){this.h=t.h,this.d=t.d,this.w=t.w,t.pwidth&&(this.pwidth=t.pwidth)},o.fullWidth="100%",o);function o(t){void 0===t&&(t={w:0,h:-n.BIGDIMEN,d:-n.BIGDIMEN}),this.w=t.w||0,this.h="h"in t?t.h:-n.BIGDIMEN,this.d="d"in t?t.d:-n.BIGDIMEN,this.L=this.R=this.ic=this.sk=0,this.scale=this.rscale=1,this.pwidth=""}e.BBox=i},function(t,h,o){"use strict";(function(r){var l=this&&this.__values||function(t){var e="function"==typeof Symbol&&Symbol.iterator,r=e&&t[e],n=0;if(r)return r.call(t);if(t&&"number"==typeof t.length)return{next:function(){return t&&n>=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(h,"__esModule",{value:!0});var t,e,n=o(5),u=o(18),i=o(18);h.Package=i.Package,h.PackageError=i.PackageError,(e=t=h.Loader||(h.Loader={})).ready=function(){for(var e,t,r=[],n=0;n=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var a,p=r(17),s=(a=Error,i(c,a),c);function c(t,e){var r=a.call(this,t)||this;return r.package=e,r}e.PackageError=s;var l=(d.resolvePath=function(t,e){void 0===e&&(e=!0);var r,n=p.CONFIG.source[t]||t;for(n.match(/^(?:[a-z]+:\/)?\/|\[/)||(n="[mathjax]/"+n.replace(/^\.\//,"")),e&&!n.match(/\.[^\/]+$/)&&(n+=".js");(r=n.match(/^\[([^\]]*)\]/))&&p.CONFIG.paths.hasOwnProperty(r[1]);)n=p.CONFIG.paths[r[1]]+n.substr(r[0].length);return n},Object.defineProperty(d.prototype,"canLoad",{get:function(){return 0===this.dependencyCount&&!this.noLoad&&!this.isLoading&&!this.hasFailed},enumerable:!0,configurable:!0}),d.prototype.makeDependencies=function(){var e,t,r=[],n=d.packages,i=this.noLoad,o=this.name,a=[];p.CONFIG.dependencies.hasOwnProperty(o)?a.push.apply(a,h(p.CONFIG.dependencies[o])):"core"!==o&&a.push("core");try{for(var s=f(a),c=s.next();!c.done;c=s.next()){var l=c.value,u=n.get(l)||new d(l,i);this.dependencies.indexOf(u)<0&&(u.addDependent(this,i),this.dependencies.push(u),u.isLoaded||(this.dependencyCount++,r.push(u.promise)))}}catch(t){e={error:t}}finally{try{c&&!c.done&&(t=s.return)&&t.call(s)}finally{if(e)throw e.error}}return r},d.prototype.makePromise=function(t){var r=this,e=new Promise(function(t,e){r.resolve=t,r.reject=e}),n=p.CONFIG[this.name]||{};return n.ready&&(e=e.then(function(t){return n.ready(r.name)})),t.length&&(t.push(e),e=Promise.all(t).then(function(t){return t.join(", ")})),n.failed&&e.catch(function(t){return n.failed(new s(t,r.name))}),e},d.prototype.load=function(){if(!this.isLoaded&&!this.isLoading&&!this.noLoad){this.isLoading=!0;var t=d.resolvePath(this.name);p.CONFIG.require?this.loadCustom(t):this.loadScript(t)}},d.prototype.loadCustom=function(t){var e=this;try{var r=p.CONFIG.require(t);r instanceof Promise?r.then(function(){return e.checkLoad()}).catch(function(){return e.failed("Can't load \""+t+'"')}):this.checkLoad()}catch(t){this.failed(t.message)}},d.prototype.loadScript=function(e){var 
r=this,t=document.createElement("script");t.src=e,t.charset="UTF-8",t.onload=function(t){return r.checkLoad()},t.onerror=function(t){return r.failed("Can't load \""+e+'"')},document.head.appendChild(t)},d.prototype.loaded=function(){var e,t,r,n;this.isLoaded=!0,this.isLoading=!1;try{for(var i=f(this.dependents),o=i.next();!o.done;o=i.next())o.value.requirementSatisfied()}catch(t){e={error:t}}finally{try{o&&!o.done&&(t=i.return)&&t.call(i)}finally{if(e)throw e.error}}try{for(var a=f(this.provided),s=a.next();!s.done;s=a.next())s.value.loaded()}catch(t){r={error:t}}finally{try{s&&!s.done&&(n=a.return)&&n.call(a)}finally{if(r)throw r.error}}this.resolve(this.name)},d.prototype.failed=function(t){this.hasFailed=!0,this.isLoading=!1,this.reject(new s(t,this.name))},d.prototype.checkLoad=function(){var e=this;((p.CONFIG[this.name]||{}).checkReady||function(){return Promise.resolve()})().then(function(){return e.loaded()}).catch(function(t){return e.failed(t)})},d.prototype.requirementSatisfied=function(){this.dependencyCount&&(this.dependencyCount--,this.canLoad&&this.load())},d.prototype.provides=function(t){var e,r;void 0===t&&(t=[]);try{for(var n=f(t),i=n.next();!i.done;i=n.next()){var o=i.value,a=d.packages.get(o);a||(p.CONFIG.dependencies[o]||(p.CONFIG.dependencies[o]=[]),p.CONFIG.dependencies[o].push(o),(a=new d(o,!0)).isLoading=!0),this.provided.push(a)}}catch(t){e={error:t}}finally{try{i&&!i.done&&(r=n.return)&&r.call(n)}finally{if(e)throw e.error}}},d.prototype.addDependent=function(t,e){this.dependents.push(t),e||this.checkNoLoad()},d.prototype.checkNoLoad=function(){var e,t;if(this.noLoad){this.noLoad=!1;try{for(var r=f(this.dependencies),n=r.next();!n.done;n=r.next())n.value.checkNoLoad()}catch(t){e={error:t}}finally{try{n&&!n.done&&(t=r.return)&&t.call(r)}finally{if(e)throw e.error}}}},d.loadAll=function(){var e,t;try{for(var r=f(this.packages.values()),n=r.next();!n.done;n=r.next()){var i=n.value;i.canLoad&&i.load()}}catch(t){e={error:t}}finally{try{n&&!n.done&&(t=r.return)&&t.call(r)}finally{if(e)throw e.error}}},d.packages=new Map,d);function d(t,e){void 0===e&&(e=!1),this.isLoaded=!1,this.isLoading=!1,this.hasFailed=!1,this.dependents=[],this.dependencies=[],this.dependencyCount=0,this.provided=[],this.name=t,this.noLoad=e,d.packages.set(t,this),this.promise=this.makePromise(this.makeDependencies())}e.Package=l},function(t,r,e){"use strict";var c=this&&this.__values||function(t){var e="function"==typeof Symbol&&Symbol.iterator,r=e&&t[e],n=0;if(r)return r.call(t);if(t&&"number"==typeof t.length)return{next:function(){return t&&n>=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(r,"__esModule",{value:!0}),r.INHERIT="_inherit_";var n=(i.prototype.set=function(t,e){this.attributes[t]=e},i.prototype.setList=function(t){Object.assign(this.attributes,t)},i.prototype.get=function(t){var e=this.attributes[t];return e===r.INHERIT&&(e=this.global[t]),e},i.prototype.getExplicit=function(t){if(this.attributes.hasOwnProperty(t))return this.attributes[t]},i.prototype.getList=function(){for(var e,t,r=[],n=0;n=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},s=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not 
iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0}),e.V=1,e.H=2,e.NOSTRETCH={dir:0};var i=(o.charOptions=function(t,e){var r=t[e];return 3===r.length&&(r[3]={}),r[3]},o.prototype.createVariant=function(t,e,r){void 0===e&&(e=null),void 0===r&&(r=null);var n={linked:[],chars:e?Object.create(this.variant[e].chars):{}};r&&this.variant[r]&&(Object.assign(n.chars,this.variant[r].chars),this.variant[r].linked.push(n.chars),n.chars=Object.create(n.chars)),this.variant[t]=n},o.prototype.createVariants=function(t){var e,r;try{for(var n=c(t),i=n.next();!i.done;i=n.next()){var o=i.value;this.createVariant(o[0],o[1],o[2])}}catch(t){e={error:t}}finally{try{i&&!i.done&&(r=n.return)&&r.call(n)}finally{if(e)throw e.error}}},o.prototype.defineChars=function(t,e){var r,n,i=this.variant[t];Object.assign(i.chars,e);try{for(var o=c(i.linked),a=o.next();!a.done;a=o.next()){var s=a.value;Object.assign(s,e)}}catch(t){r={error:t}}finally{try{a&&!a.done&&(n=o.return)&&n.call(o)}finally{if(r)throw r.error}}},o.prototype.defineDelimiters=function(t){Object.assign(this.delimiters,t)},o.prototype.defineRemap=function(t,e){this.remapChars.hasOwnProperty(t)||(this.remapChars[t]={}),Object.assign(this.remapChars[t],e)},o.prototype.getDelimiter=function(t){return this.delimiters[t]},o.prototype.getSizeVariant=function(t,e){return this.delimiters[t].variants&&(e=this.delimiters[t].variants[e]),this.sizeVariants[e]},o.prototype.getChar=function(t,e){return this.variant[t].chars[e]},o.prototype.getVariant=function(t){return this.variant[t]},o.prototype.getCssFont=function(t){return this.cssFontMap[t]||["serif",!1,!1]},o.prototype.getRemappedChar=function(t,e){return(this.remapChars[t]||{})[e]},o.OPTIONS={},o.defaultVariants=[["normal"],["bold","normal"],["italic","normal"],["bold-italic","italic","bold"],["double-struck","bold"],["fraktur","normal"],["bold-fraktur","bold","fraktur"],["script","normal"],["bold-script","bold","script"],["sans-serif","normal"],["bold-sans-serif","bold","sans-serif"],["sans-serif-italic","italic","sans-serif"],["bold-sans-serif-italic","bold-italic","sans-serif"],["monospace","normal"]],o.defaultCssFonts={normal:["serif",!1,!1],bold:["serif",!1,!0],italic:["serif",!0,!1],"bold-italic":["serif",!0,!0],"double-struck":["serif",!1,!0],fraktur:["serif",!1,!1],"bold-fraktur":["serif",!1,!0],script:["cursive",!1,!1],"bold-script":["cursive",!1,!0],"sans-serif":["sans-serif",!1,!1],"bold-sans-serif":["sans-serif",!1,!0],"sans-serif-italic":["sans-serif",!0,!1],"bold-sans-serif-italic":["sans-serif",!0,!0],monospace:["monospace",!1,!1]},o.defaultAccentMap={768:"\u02cb",769:"\u02ca",770:"\u02c6",771:"\u02dc",772:"\u02c9",774:"\u02d8",775:"\u02d9",776:"\xa8",778:"\u02da",780:"\u02c7",8594:"\u20d7",8242:"'",8243:"''",8244:"'''",8245:"`",8246:"``",8247:"```",8279:"''''",8400:"\u21bc",8401:"\u21c0",8406:"\u2190",8417:"\u2194",8432:"*",8411:"...",8412:"....",8428:"\u21c1",8429:"\u21bd",8430:"\u2190",8431:"\u2192"},o.defaultMoMap={45:"\u2212"},o.defaultMnMap={45:"\u2212"},o.defaultParams={x_height:.442,quad:1,num1:.676,num2:.394,num3:.444,denom1:.686,denom2:.345,sup1:.413,sup2:.363,sup3:.289,sub1:.15,sub2:.247,sup_drop:.386,sub_drop:.05,delim1:2.39,delim2:1,axis_height:.25,rule_thickness:.06,big_op_spacing1:.111,big_op_spacing2:.167,big_op_spacing3:.2,big_op_spacing4:.6,big_op_spacing5:.1,surd_height:.075,scriptspace:.05,nulldelimiterspace:.12,delimiterfactor:901,delimitershortfall:.3,min_rule_thickness:1.25},o.defaultDelimiters={},o.defaultChars={},o.defaultSizeVariant
s=[],o);function o(){var e,t;this.variant={},this.delimiters={},this.cssFontMap={},this.remapChars={};var r=this.constructor;this.params=a({},r.defaultParams),this.sizeVariants=s(r.defaultSizeVariants),this.cssFontMap=a({},r.defaultCssFonts),this.createVariants(r.defaultVariants),this.defineDelimiters(r.defaultDelimiters);try{for(var n=c(Object.keys(r.defaultChars)),i=n.next();!i.done;i=n.next()){var o=i.value;this.defineChars(o,r.defaultChars[o])}}catch(t){e={error:t}}finally{try{i&&!i.done&&(t=n.return)&&t.call(n)}finally{if(e)throw e.error}}this.defineRemap("accent",r.defaultAccentMap),this.defineRemap("mo",r.defaultMoMap),this.defineRemap("mn",r.defaultMnMap)}e.FontData=i},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=(i.prototype[Symbol.iterator]=function(){var t=0,e=this.items;return{next:function(){return{value:e[t++],done:t>e.length}}}},i.prototype.add=function(t,e){void 0===e&&(e=i.DEFAULTPRIORITY);for(var r=this.items.length;0<=--r&&e=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},o=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var M,a=r(0),s=r(86),c=(M=a.AbstractMmlTokenNode,i(l,M),Object.defineProperty(l.prototype,"kind",{get:function(){return"mo"},enumerable:!0,configurable:!0}),Object.defineProperty(l.prototype,"isEmbellished",{get:function(){return!0},enumerable:!0,configurable:!0}),Object.defineProperty(l.prototype,"hasNewLine",{get:function(){return"newline"===this.attributes.get("linebreak")},enumerable:!0,configurable:!0}),l.prototype.coreParent=function(){for(var t=this,e=this.factory.getNodeClass("math");t&&t.isEmbellished&&t.coreMO()===this&&!(t instanceof e);)t=t.Parent;return t},l.prototype.coreText=function(t){if(!t)return"";if(t.isEmbellished)return t.coreMO().getText();for(;((t.isKind("mrow")||t.isKind("TeXAtom")||t.isKind("mstyle")||t.isKind("mphantom"))&&1===t.childNodes.length||t.isKind("munderover"))&&t.childNodes[0];)t=t.childNodes[0];return t.isToken?t.getText():""},l.prototype.hasSpacingAttributes=function(){return this.attributes.isSet("lspace")||this.attributes.isSet("rspace")},Object.defineProperty(l.prototype,"isAccent",{get:function(){var t=!1,e=this.coreParent();if(e){var r=e.isKind("mover")?e.childNodes[e.over].coreMO()?"accent":"":e.isKind("munder")?e.childNodes[e.under].coreMO()?"accentunder":"":e.isKind("munderover")?this===e.childNodes[e.over].coreMO()?"accent":this===e.childNodes[e.under].coreMO()?"accentunder":"":"";r&&(t=void 0!==e.attributes.getExplicit(r)?t:this.attributes.get("accent"))}return t},enumerable:!0,configurable:!0}),l.prototype.setTeXclass=function(t){var e=this.attributes.getList("form","fence"),r=e.form,n=e.fence;return this.attributes.isSet("lspace")||this.attributes.isSet("rspace")?(this.texClass=a.TEXCLASS.NONE,null):(n&&this.texClass===a.TEXCLASS.REL&&("prefix"===r&&(this.texClass=a.TEXCLASS.OPEN),"postfix"===r&&(this.texClass=a.TEXCLASS.CLOSE)),"\u2061"===this.getText()?(t&&(t.texClass=a.TEXCLASS.OP,t.setProperty("fnOP",!0)),this.texClass=this.prevClass=a.TEXCLASS.NONE,t):this.adjustTeXclass(t))},l.prototype.adjustTeXclass=function(t){var e=this.texClass,r=this.prevClass;if(e===a.TEXCLASS.NONE)return 
t;if(t?(!t.getProperty("autoOp")||e!==a.TEXCLASS.BIN&&e!==a.TEXCLASS.REL||(e=this.texClass=a.TEXCLASS.ORD),r=this.prevClass=t.texClass||a.TEXCLASS.ORD,this.prevLevel=this.attributes.getInherited("scriptlevel")):r=this.prevClass=a.TEXCLASS.NONE,e!==a.TEXCLASS.BIN||r!==a.TEXCLASS.NONE&&r!==a.TEXCLASS.BIN&&r!==a.TEXCLASS.OP&&r!==a.TEXCLASS.REL&&r!==a.TEXCLASS.OPEN&&r!==a.TEXCLASS.PUNCT)if(r!==a.TEXCLASS.BIN||e!==a.TEXCLASS.REL&&e!==a.TEXCLASS.CLOSE&&e!==a.TEXCLASS.PUNCT){if(e===a.TEXCLASS.BIN){for(var n=this,i=this.parent;i&&i.parent&&i.isEmbellished&&(1===i.childNodes.length||!i.isKind("mrow")&&i.core()===n);)i=(n=i).parent;i.childNodes[i.childNodes.length-1]===n&&(this.texClass=a.TEXCLASS.ORD)}}else t.texClass=this.prevClass=a.TEXCLASS.ORD;else this.texClass=a.TEXCLASS.ORD;return this},l.prototype.setInheritedAttributes=function(t,e,r,n){var i,o;void 0===t&&(t={}),void 0===e&&(e=!1),void 0===r&&(r=0),void 0===n&&(n=!1),M.prototype.setInheritedAttributes.call(this,t,e,r,n);var a=this.getText(),s=b(this.handleExplicitForm(this.getForms()),3),c=s[0],l=s[1],u=s[2];this.attributes.setInherited("form",c);var h=this.constructor.OPTABLE,f=h[c][a]||h[l][a]||h[u][a];if(f){void 0===this.getProperty("texClass")&&(this.texClass=f[2]);try{for(var p=g(Object.keys(f[3]||{})),d=p.next();!d.done;d=p.next()){var m=d.value;this.attributes.setInherited(m,f[3][m])}}catch(t){i={error:t}}finally{try{d&&!d.done&&(o=p.return)&&o.call(p)}finally{if(i)throw i.error}}this.lspace=(f[0]+1)/18,this.rspace=(f[1]+1)/18}else{var y=this.getRange(a);if(y){void 0===this.getProperty("texClass")&&(this.texClass=y[2]);var v=this.constructor.MMLSPACING[y[2]];this.lspace=(v[0]+1)/18,this.rspace=(v[1]+1)/18}}},l.prototype.getForms=function(){for(var t=this,e=this.parent,r=this.Parent;r&&r.isEmbellished;)t=e,e=r.parent,r=r.Parent;if(e&&e.isKind("mrow")&&1!==e.nonSpaceLength()){if(e.firstNonSpace()===t)return["prefix","infix","postfix"];if(e.lastNonSpace()===t)return["postfix","infix","prefix"]}return["infix","prefix","postfix"]},l.prototype.handleExplicitForm=function(t){if(this.attributes.isSet("form")){var e=this.attributes.get("form");t=[e].concat(t.filter(function(t){return t!==e}))}return t},l.prototype.getRange=function(t){var e,r;if(!t.match(/^[\uD800-\uDBFF]?.$/))return null;var n=t.charCodeAt(0);2===t.length&&(n=1024*(n-55296)+t.charCodeAt(1)-56320+65536);var i=this.constructor.RANGES;try{for(var o=g(i),a=o.next();!a.done;a=o.next()){var s=a.value;if(s[0]<=n&&n<=s[1])return s;if(n=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var o=r(21),s=function(t,e){void 0===t&&(t="???"),void 0===e&&(e=""),this.tag=t,this.id=e};e.Label=s;var c=function(t,e,r,n,i,o,a,s){void 0===t&&(t=""),void 0===e&&(e=!1),void 0===r&&(r=!1),void 0===n&&(n=null),void 0===i&&(i=""),void 0===o&&(o=""),void 0===a&&(a=!1),void 0===s&&(s=""),this.env=t,this.taggable=e,this.defaultTags=r,this.tag=n,this.tagId=i,this.tagFormat=o,this.noTag=a,this.labelId=s};e.TagInfo=c;var l=(u.prototype.start=function(t,e,r){this.currentTag&&this.stack.push(this.currentTag),this.currentTag=new c(t,e,r)},Object.defineProperty(u.prototype,"env",{get:function(){return 
this.currentTag.env},enumerable:!0,configurable:!0}),u.prototype.end=function(){this.history.push(this.currentTag),this.currentTag=this.stack.pop()},u.prototype.tag=function(t,e){this.currentTag.tag=t,this.currentTag.tagFormat=e?t:this.formatTag(t),this.currentTag.noTag=!1},u.prototype.notag=function(){this.tag("",!0),this.currentTag.noTag=!0},Object.defineProperty(u.prototype,"noTag",{get:function(){return this.currentTag.noTag},enumerable:!0,configurable:!0}),Object.defineProperty(u.prototype,"label",{get:function(){return this.currentTag.labelId},set:function(t){this.currentTag.labelId=t},enumerable:!0,configurable:!0}),u.prototype.formatUrl=function(t,e){return e+"#"+encodeURIComponent(t)},u.prototype.formatTag=function(t){return"("+t+")"},u.prototype.formatId=function(t){return"mjx-eqn-"+t.replace(/\s/g,"_")},u.prototype.formatNumber=function(t){return t.toString()},u.prototype.autoTag=function(){null==this.currentTag.tag&&(this.counter++,this.tag(this.formatNumber(this.counter),!1))},u.prototype.clearTag=function(){this.label="",this.tag(null,!0),this.currentTag.tagId=""},u.prototype.getTag=function(t){if(void 0===t&&(t=!1),t)return this.autoTag(),this.makeTag();var e=this.currentTag;return e.taggable&&!e.noTag&&(e.defaultTags&&this.autoTag(),e.tag)?this.makeTag():null},u.prototype.resetTag=function(){this.history=[],this.redo=!1,this.refUpdate=!1,this.clearTag()},u.prototype.reset=function(t){void 0===t&&(t=0),this.resetTag(),this.counter=this.allCounter=t,this.allLabels={},this.allIds={}},u.prototype.startEquation=function(t){this.labels={},this.ids={},this.counter=this.allCounter,this.redo=!1;var e=t.inputData.recompile;e&&(this.refUpdate=!0,this.counter=e.counter)},u.prototype.finishEquation=function(t){this.redo&&(t.inputData.recompile={state:t.state(),counter:this.allCounter}),this.refUpdate||(this.allCounter=this.counter),Object.assign(this.allIds,this.ids),Object.assign(this.allLabels,this.labels)},u.prototype.finalize=function(t,e){if(!e.display||this.currentTag.env||null==this.currentTag.tag)return t;var r=this.makeTag();return this.enTag(t,r)},u.prototype.makeId=function(){this.currentTag.tagId=this.formatId(this.configuration.options.useLabelIds&&this.label||this.currentTag.tag)},u.prototype.makeTag=function(){this.makeId(),this.label&&(this.labels[this.label]=new s(this.currentTag.tag,this.currentTag.tagId));var t=new o.default("\\text{"+this.currentTag.tagFormat+"}",{},this.configuration).mml();return this.configuration.nodeFactory.create("node","mtd",[t],{id:this.currentTag.tagId})},u);function u(){this.counter=0,this.allCounter=0,this.configuration=null,this.ids={},this.allIds={},this.labels={},this.allLabels={},this.redo=!1,this.refUpdate=!1,this.currentTag=new c,this.history=[],this.stack=[],this.enTag=function(t,e){var r=this.configuration.nodeFactory,n=r.create("node","mtd",[t]),i=r.create("node","mlabeledtr",[e,n]);return r.create("node","mtable",[i],{side:this.configuration.options.tagSide,minlabelspacing:this.configuration.options.tagIndent,displaystyle:!0})}}e.AbstractTags=l;var h,f=(i(p,h=l),p.prototype.autoTag=function(){},p.prototype.getTag=function(){return this.currentTag.tag?h.prototype.getTag.call(this):null},p);function p(){return null!==h&&h.apply(this,arguments)||this}e.NoTags=f;var d,m,y,v,b=(i(g,d=l),g.prototype.finalize=function(t,e){if(!e.display||this.history.find(function(t){return t.taggable}))return t;var r=this.getTag(!0);return this.enTag(t,r)},g);function g(){return 
null!==d&&d.apply(this,arguments)||this}e.AllTags=b,m=e.TagsFactory||(e.TagsFactory={}),y=new Map([["none",f],["all",b]]),v="none",m.OPTIONS={tags:v,tagSide:"right",tagIndent:"0.8em",multlineWidth:"85%",useLabelIds:!0,ignoreDuplicateLabels:!1},m.add=function(t,e){y.set(t,e)},m.addTags=function(t){var e,r;try{for(var n=a(Object.keys(t)),i=n.next();!i.done;i=n.next()){var o=i.value;m.add(o,t[o])}}catch(t){e={error:t}}finally{try{i&&!i.done&&(r=n.return)&&r.call(n)}finally{if(e)throw e.error}}},m.create=function(t){return new(y.get(t)||this.defaultTags)},m.setDefault=function(t){v=t},m.getDefault=function(){return m.create(v)}},function($K,_K){var aL;aL=function(){return this}();try{aL=aL||Function("return this")()||eval("this")}catch(t){"object"==typeof window&&(aL=window)}$K.exports=aL},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(3),i=r(25),o=(Object.defineProperty(a.prototype,"name",{get:function(){return this.constructor.NAME},enumerable:!0,configurable:!0}),a.prototype.setAdaptor=function(t){this.adaptor=t},a.prototype.setMmlFactory=function(t){this.mmlFactory=t},a.prototype.initialize=function(){},Object.defineProperty(a.prototype,"processStrings",{get:function(){return!0},enumerable:!0,configurable:!0}),a.prototype.findMath=function(t,e){return[]},a.prototype.executeFilters=function(t,e,r,n){var i={math:e,document:r,data:n};return t.execute(i),i.data},a.NAME="generic",a.OPTIONS={},a);function a(t){void 0===t&&(t={}),this.adaptor=null,this.mmlFactory=null;var e=this.constructor;this.options=n.userOptions(n.defaultOptions({},e.OPTIONS),t),this.preFilters=new i.FunctionList,this.postFilters=new i.FunctionList}e.AbstractInputJax=o},function(t,e,r){"use strict";var a=this&&this.__values||function(t){var e="function"==typeof Symbol&&Symbol.iterator,r=e&&t[e],n=0;if(r)return r.call(t);if(t&&"number"==typeof t.length)return{next:function(){return t&&n>=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},n=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var c=r(4),l=(Object.defineProperty(u.prototype,"nodes",{get:function(){return this._nodes},enumerable:!0,configurable:!0}),u.prototype.Push=function(){for(var t,e=[],r=0;rt.configuration.options.maxMacros)throw new d.default("MaxMacroSub2","MathJax maximum substitution count exceeded; is there a recursive latex environment?");t.parse("environment",[t,r])},i.Array=function(t,e,r,n,i,o,a,s,c){var l=("c"+(i=i||t.GetArgument("\\begin{"+e.getName()+"}"))).replace(/[^clr|:]/g,"").replace(/[^|:]([|:])+/g,"$1");i=(i=i.replace(/[^clr]/g,"").split("").join(" ")).replace(/l/g,"left").replace(/r/g,"right").replace(/c/g,"center");var u=t.itemFactory.create("array");return u.arraydef={columnalign:i,columnspacing:o||"1em",rowspacing:a||"4pt"},l.match(/[|:]/)&&(l.charAt(0).match(/[|:]/)&&(u.frame.push("left"),u.dashed=":"===l.charAt(0)),l.charAt(l.length-1).match(/[|:]/)&&u.frame.push("right"),l=l.substr(1,l.length-2),u.arraydef.columnlines=l.split("").join(" ").replace(/[^|: 
]/g,"none").replace(/\|/g,"solid").replace(/:/g,"dashed")),r&&u.setProperty("open",t.convertDelimiter(r)),n&&u.setProperty("close",t.convertDelimiter(n)),"D"===s?u.arraydef.displaystyle=!0:s&&(u.arraydef.displaystyle=!1),"S"===s&&(u.arraydef.scriptlevel=1),c&&(u.arraydef.useHeight=!1),t.Push(e),u},i.AlignedArray=function(t,e){var r=t.GetBrackets("\\begin{"+e.getName()+"}"),n=i.Array(t,e);return y.default.setArrayAlign(n,r)},i.Equation=function(t,e,r){return t.Push(e),y.default.checkEqnEnv(t),t.itemFactory.create("equation",r).setProperty("name",e.getName())},i.EqnArray=function(t,e,r,n,i,o){t.Push(e),n&&y.default.checkEqnEnv(t),i=(i=i.replace(/[^clr]/g,"").split("").join(" ")).replace(/l/g,"left").replace(/r/g,"right").replace(/c/g,"center");var a=t.itemFactory.create("eqnarray",e.getName(),r,n,t.stack.global);return a.arraydef={displaystyle:!0,columnalign:i,columnspacing:o||"1em",rowspacing:"3pt",side:t.options.tagSide,minlabelspacing:t.options.tagIndent},a},i.HandleNoTag=function(t,e){t.tags.notag()},i.HandleLabel=function(t,e){t.stack.global;var r=t.GetArgument(e);if(""!==r&&!t.tags.refUpdate){if(t.tags.label)throw new d.default("MultipleCommand","Multiple %1",t.currentCS);if(t.tags.label=r,(t.tags.allLabels[r]||t.tags.labels[r])&&!t.options.ignoreDuplicateLabels)throw new d.default("MultipleLabel","Label '%1' multiply defined",r);t.tags.labels[r]=new s.Label}},i.HandleRef=function(t,e,r){var n=t.GetArgument(e),i=t.tags.allLabels[n]||t.tags.labels[n];i||(t.tags.refUpdate||(t.tags.redo=!0),i=new s.Label);var o=i.tag;r&&(o=t.tags.formatTag(o));var a=t.create("node","mrow",y.default.internalMath(t,o),{href:t.tags.formatUrl(i.id,t.options.baseURL),class:"MathJax_ref"});t.Push(a)},i.Macro=function(t,e,r,n,i){if(n){var o=[];if(null!=i){var a=t.GetBrackets(e);o.push(null==a?i:a)}for(var s=o.length;st.configuration.options.maxMacros)throw new d.default("MaxMacroSub1","MathJax maximum macro substitution count exceeded; is there a recursive macro call?")},i.MathChoice=function(t,e){var r=t.ParseArg(e),n=t.ParseArg(e),i=t.ParseArg(e),o=t.ParseArg(e);t.Push(t.create("node","mathchoice",[r,n,i,o]))},e.default=i},function(t,p,e){"use strict";var d=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0Math.PI/2-r?t.thickness*h*Math.sin(u+r-Math.PI/2):0);return[f,p,f,p]},remove:e[3]}]}},p.CommonArrow=function(f){return function(t){var e=d(p.arrowDef[t],4),l=e[0],u=e[1],h=e[2],r=e[3];return[t+"arrow",{renderer:function(t,e){var r=t.getBBox(),n=r.w,i=r.h,o=r.d,a=d(h?[i+o,n]:[n,i+o],2),s=a[0],c=(a[1],t.arrow(s,l,u));f(t,c)},bbox:p.arrowBBox[t],remove:r}]}}},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),h=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0 *":{display:"block"}},g.useIC=!1,g);function g(){return null!==v&&v.apply(this,arguments)||this}e.CHTMLmsubsup=b},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in 
e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),f=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},p=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=e&&a.item.renderDoc(t))return}}catch(t){r={error:t}}finally{try{o&&!o.done&&(n=i.return)&&n.call(i)}finally{if(r)throw r.error}}},v.prototype.renderMath=function(t,e,r){var n,i;void 0===r&&(r=m.STATE.UNPROCESSED);try{for(var o=h(this.items),a=o.next();!a.done;a=o.next()){var s=a.value;if(s.priority>=r&&s.item.renderMath(t,e))return}}catch(t){n={error:t}}finally{try{a&&!a.done&&(i=o.return)&&i.call(o)}finally{if(n)throw n.error}}},v.prototype.renderConvert=function(t,e,r){var n,i;void 0===r&&(r=m.STATE.LAST);try{for(var o=h(this.items),a=o.next();!a.done;a=o.next()){var s=a.value;if(s.priority>=r)return;if(s.item.convert&&s.item.renderMath(t,e))return}}catch(t){n={error:t}}finally{try{a&&!a.done&&(i=o.return)&&i.call(o)}finally{if(n)throw n.error}}},v.prototype.findID=function(t){var e,r;try{for(var n=h(this.items),i=n.next();!i.done;i=n.next()){var o=i.value;if(o.item.id===t)return o.item}}catch(t){e={error:t}}finally{try{i&&!i.done&&(r=n.return)&&r.call(n)}finally{if(e)throw e.error}}return null},v);function v(){return null!==o&&o.apply(this,arguments)||this}e.RenderList=y;var b,g=(b=a.AbstractInputJax,i(M,b),M.prototype.compile=function(t){return null},M);function M(){return null!==b&&b.apply(this,arguments)||this}var O,x=(O=s.AbstractOutputJax,i(S,O),S.prototype.typeset=function(t,e){return void 0===e&&(e=null),null},S.prototype.escaped=function(t,e){return null},S);function S(){return null!==O&&O.apply(this,arguments)||this}var E,C=(E=c.AbstractMathList,i(_,E),_);function _(){return null!==E&&E.apply(this,arguments)||this}var T,w=(T=m.AbstractMathItem,i(A,T),A);function A(){return null!==T&&T.apply(this,arguments)||this}var k=(Object.defineProperty(I.prototype,"kind",{get:function(){return this.constructor.KIND},enumerable:!0,configurable:!0}),I.prototype.addRenderAction=function(t){for(var e=[],r=1;r=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var o=(Object.defineProperty(a.prototype,"factory",{get:function(){return this._factory},enumerable:!0,configurable:!0}),Object.defineProperty(a.prototype,"kind",{get:function(){return"unknown"},enumerable:!0,configurable:!0}),a.prototype.setProperty=function(t,e){this.properties[t]=e},a.prototype.getProperty=function(t){return this.properties[t]},a.prototype.getPropertyNames=function(){return Object.keys(this.properties)},a.prototype.getAllProperties=function(){return this.properties},a.prototype.removeProperty=function(){for(var e,t,r=[],n=0;n=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var 
a,u=r(0),s=(a=u.AbstractMmlNode,i(c,a),Object.defineProperty(c.prototype,"kind",{get:function(){return"mrow"},enumerable:!0,configurable:!0}),Object.defineProperty(c.prototype,"isSpacelike",{get:function(){var e,t;try{for(var r=l(this.childNodes),n=r.next();!n.done;n=r.next())if(!n.value.isSpacelike)return!1}catch(t){e={error:t}}finally{try{n&&!n.done&&(t=r.return)&&t.call(r)}finally{if(e)throw e.error}}return!0},enumerable:!0,configurable:!0}),Object.defineProperty(c.prototype,"isEmbellished",{get:function(){var e,t,r=!1,n=0;try{for(var i=l(this.childNodes),o=i.next();!o.done;o=i.next()){var a=o.value;if(a)if(a.isEmbellished){if(r)return!1;r=!0,this._core=n}else if(!a.isSpacelike)return!1;n++}}catch(t){e={error:t}}finally{try{o&&!o.done&&(t=i.return)&&t.call(i)}finally{if(e)throw e.error}}return r},enumerable:!0,configurable:!0}),c.prototype.core=function(){return this.isEmbellished&&null!=this._core?this.childNodes[this._core]:this},c.prototype.coreMO=function(){return this.isEmbellished&&null!=this._core?this.childNodes[this._core].coreMO():this},c.prototype.nonSpaceLength=function(){var e,t,r=0;try{for(var n=l(this.childNodes),i=n.next();!i.done;i=n.next()){var o=i.value;o&&!o.isSpacelike&&r++}}catch(t){e={error:t}}finally{try{i&&!i.done&&(t=n.return)&&t.call(n)}finally{if(e)throw e.error}}return r},c.prototype.firstNonSpace=function(){var e,t;try{for(var r=l(this.childNodes),n=r.next();!n.done;n=r.next()){var i=n.value;if(i&&!i.isSpacelike)return i}}catch(t){e={error:t}}finally{try{n&&!n.done&&(t=r.return)&&t.call(r)}finally{if(e)throw e.error}}return null},c.prototype.lastNonSpace=function(){for(var t=this.childNodes.length;0<=--t;){var e=this.childNodes[t];if(e&&!e.isSpacelike)return e}return null},c.prototype.setTeXclass=function(t){var e,r,n,i;if(null==this.getProperty("open")&&null==this.getProperty("close")||t&&null==t.getProperty("fnOp")){try{for(var o=l(this.childNodes),a=o.next();!a.done;a=o.next())t=a.value.setTeXclass(t)}catch(t){n={error:t}}finally{try{a&&!a.done&&(i=o.return)&&i.call(o)}finally{if(n)throw n.error}}this.childNodes[0]&&this.updateTeXclass(this.childNodes[0])}else{this.getPrevClass(t),t=null;try{for(var s=l(this.childNodes),c=s.next();!c.done;c=s.next())t=c.value.setTeXclass(t)}catch(t){e={error:t}}finally{try{c&&!c.done&&(r=s.return)&&r.call(s)}finally{if(e)throw e.error}}null==this.texClass&&(this.texClass=u.TEXCLASS.INNER)}return t},c.defaults=o({},u.AbstractMmlNode.defaults),c);function c(){var t=null!==a&&a.apply(this,arguments)||this;return t._core=null,t}e.MmlMrow=s;var h,f=(i(p,h=s),Object.defineProperty(p.prototype,"kind",{get:function(){return"inferredMrow"},enumerable:!0,configurable:!0}),Object.defineProperty(p.prototype,"isInferred",{get:function(){return!0},enumerable:!0,configurable:!0}),Object.defineProperty(p.prototype,"notParent",{get:function(){return!0},enumerable:!0,configurable:!0}),p.prototype.toString=function(){return"["+this.childNodes.join(",")+"]"},p.defaults=s.defaults,p);function p(){return null!==h&&h.apply(this,arguments)||this}e.MmlInferredMrow=f},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),o=this&&this.__assign||function(){return(o=Object.assign||function(t){for(var e,r=1,n=arguments.length;r=t.length&&(t=void 
0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var s,c=r(0),l=(s=c.AbstractMmlBaseNode,i(u,s),Object.defineProperty(u.prototype,"kind",{get:function(){return"mfrac"},enumerable:!0,configurable:!0}),Object.defineProperty(u.prototype,"arity",{get:function(){return 2},enumerable:!0,configurable:!0}),Object.defineProperty(u.prototype,"linebreakContainer",{get:function(){return!0},enumerable:!0,configurable:!0}),u.prototype.setTeXclass=function(t){var e,r;this.getPrevClass(t);try{for(var n=a(this.childNodes),i=n.next();!i.done;i=n.next())i.value.setTeXclass(null)}catch(t){e={error:t}}finally{try{i&&!i.done&&(r=n.return)&&r.call(n)}finally{if(e)throw e.error}}return this.isEmbellished&&this.updateTeXclass(this.core()),this},u.prototype.setChildInheritedAttributes=function(t,e,r,n){(!e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var u,h=r(0),a=(u=h.AbstractMmlNode,i(s,u),Object.defineProperty(s.prototype,"kind",{get:function(){return"mfenced"},enumerable:!0,configurable:!0}),s.prototype.setTeXclass=function(t){this.getPrevClass(t),this.open&&(t=this.open.setTeXclass(t)),this.childNodes[0]&&(t=this.childNodes[0].setTeXclass(t));for(var e=1,r=this.childNodes.length;ethis.childNodes.length&&(t=1),this.attributes.set("selection",t)},l.defaults=o(o({},s.AbstractMmlNode.defaults),{actiontype:"toggle",selection:1}),l);function l(){return null!==a&&a.apply(this,arguments)||this}e.MmlMaction=c},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),o=this&&this.__assign||function(){return(o=Object.assign||function(t){for(var e,r=1,n=arguments.length;r=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var l,u=r(0),m=r(10),a=(l=u.AbstractMmlNode,i(s,l),Object.defineProperty(s.prototype,"kind",{get:function(){return"mtable"},enumerable:!0,configurable:!0}),Object.defineProperty(s.prototype,"linebreakContainer",{get:function(){return!0},enumerable:!0,configurable:!0}),s.prototype.setInheritedAttributes=function(t,e,r,n){var i,o;try{for(var a=d(u.indentAttributes),s=a.next();!s.done;s=a.next()){var c=s.value;t[c]&&this.attributes.setInherited(c,t[c][1]),void 0!==this.attributes.getExplicit(c)&&delete this.attributes.getAllAttributes()[c]}}catch(t){i={error:t}}finally{try{s&&!s.done&&(o=a.return)&&o.call(a)}finally{if(i)throw i.error}}l.prototype.setInheritedAttributes.call(this,t,e,r,n)},s.prototype.setChildInheritedAttributes=function(t,e,r,n){var i,o,a,s;try{for(var c=d(this.childNodes),l=c.next();!l.done;l=c.next())(p=l.value).isKind("mtr")||this.replaceChild(this.factory.create("mtr"),p).appendChild(p)}catch(t){i={error:t}}finally{try{l&&!l.done&&(o=c.return)&&o.call(c)}finally{if(i)throw i.error}}e=!(!this.attributes.getExplicit("displaystyle")&&!this.attributes.getDefault("displaystyle")),t=this.addInheritedAttributes(t,{columnalign:this.attributes.get("columnalign"),rowalign:"center"});var 
u=m.split(this.attributes.get("rowalign"));try{for(var h=d(this.childNodes),f=h.next();!f.done;f=h.next()){var p=f.value;t.rowalign[1]=u.shift()||t.rowalign[1],p.setInheritedAttributes(t,e,r,n)}}catch(t){a={error:t}}finally{try{f&&!f.done&&(s=h.return)&&s.call(h)}finally{if(a)throw a.error}}},s.prototype.verifyChildren=function(t){var e,r;if(!t.fixMtables)try{for(var n=d(this.childNodes),i=n.next();!i.done;i=n.next())i.value.isKind("mtr")||this.mError("Children of "+this.kind+" must be mtr or mlabeledtr",t)}catch(t){e={error:t}}finally{try{i&&!i.done&&(r=n.return)&&r.call(n)}finally{if(e)throw e.error}}l.prototype.verifyChildren.call(this,t)},s.prototype.setTeXclass=function(t){var e,r;this.getPrevClass(t);try{for(var n=d(this.childNodes),i=n.next();!i.done;i=n.next())i.value.setTeXclass(null)}catch(t){e={error:t}}finally{try{i&&!i.done&&(r=n.return)&&r.call(n)}finally{if(e)throw e.error}}return this},s.defaults=o(o({},u.AbstractMmlNode.defaults),{align:"axis",rowalign:"baseline",columnalign:"center",groupalign:"{left}",alignmentscope:!0,columnwidth:"auto",width:"auto",rowspacing:"1ex",columnspacing:".8em",rowlines:"none",columnlines:"none",frame:"none",framespacing:"0.4em 0.5ex",equalrows:!1,equalcolumns:!1,displaystyle:!1,side:"right",minlabelspacing:"0.8em"}),s);function s(){var t=null!==l&&l.apply(this,arguments)||this;return t.properties={useHeight:1},t.texClass=u.TEXCLASS.ORD,t}e.MmlMtable=a},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),o=this&&this.__assign||function(){return(o=Object.assign||function(t){for(var e,r=1,n=arguments.length;r=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var a,s=r(0),c=r(19),m=r(10),l=(a=s.AbstractMmlNode,i(u,a),Object.defineProperty(u.prototype,"kind",{get:function(){return"mtr"},enumerable:!0,configurable:!0}),Object.defineProperty(u.prototype,"linebreakContainer",{get:function(){return!0},enumerable:!0,configurable:!0}),u.prototype.setChildInheritedAttributes=function(t,e,r,n){var i,o,a,s;try{for(var c=d(this.childNodes),l=c.next();!l.done;l=c.next())(p=l.value).isKind("mtd")||this.replaceChild(this.factory.create("mtd"),p).appendChild(p)}catch(t){i={error:t}}finally{try{l&&!l.done&&(o=c.return)&&o.call(c)}finally{if(i)throw i.error}}var u=m.split(this.attributes.get("columnalign"));1===this.arity&&u.unshift(this.parent.attributes.get("side")),t=this.addInheritedAttributes(t,{rowalign:this.attributes.get("rowalign"),columnalign:"center"});try{for(var h=d(this.childNodes),f=h.next();!f.done;f=h.next()){var p=f.value;t.columnalign[1]=u.shift()||t.columnalign[1],p.setInheritedAttributes(t,e,r,n)}}catch(t){a={error:t}}finally{try{f&&!f.done&&(s=h.return)&&s.call(h)}finally{if(a)throw a.error}}},u.prototype.verifyChildren=function(t){var e,r;if(!this.parent||this.parent.isKind("mtable")){if(!t.fixMtables)try{for(var n=d(this.childNodes),i=n.next();!i.done;i=n.next()){var o=i.value;o.isKind("mtd")||this.replaceChild(this.factory.create("mtr"),o).mError("Children of "+this.kind+" must be mtd",t,!0)}}catch(t){e={error:t}}finally{try{i&&!i.done&&(r=n.return)&&r.call(n)}finally{if(e)throw 
e.error}}a.prototype.verifyChildren.call(this,t)}else this.mError(this.kind+" can only be a child of an mtable",t,!0)},u.prototype.setTeXclass=function(t){var e,r;this.getPrevClass(t);try{for(var n=d(this.childNodes),i=n.next();!i.done;i=n.next())i.value.setTeXclass(null)}catch(t){e={error:t}}finally{try{i&&!i.done&&(r=n.return)&&r.call(n)}finally{if(e)throw e.error}}return this},u.defaults=o(o({},s.AbstractMmlNode.defaults),{rowalign:c.INHERIT,columnalign:c.INHERIT,groupalign:c.INHERIT}),u);function u(){return null!==a&&a.apply(this,arguments)||this}e.MmlMtr=l;var h,f=(i(p,h=l),Object.defineProperty(p.prototype,"kind",{get:function(){return"mlabeledtr"},enumerable:!0,configurable:!0}),Object.defineProperty(p.prototype,"arity",{get:function(){return 1},enumerable:!0,configurable:!0}),p);function p(){return null!==h&&h.apply(this,arguments)||this}e.MmlMlabeledtr=f},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),o=this&&this.__assign||function(){return(o=Object.assign||function(t){for(var e,r=1,n=arguments.length;r=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},v=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0t.configuration.options.maxMacros)throw new l.default("MaxMacroSub1","MathJax maximum macro substitution count exceeded; is here a recursive macro call?")},BeginEnv:function(t,e,r,n,i,o){if(e.getProperty("end")&&t.stack.env.closing===e.getName()){delete t.stack.env.closing;var a=t.string.slice(t.i);return t.string=n,t.i=0,t.Parse(),t.string=a,t.i=0,t.itemFactory.create("end").setProperty("name",e.getName())}if(i){var s=[];if(null!=o){var c=t.GetBrackets("\\begin{"+e.getName()+"}");s.push(null==c?o:c)}for(var l=s.length;l=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var l=r(16);e.CommonMrowMixin=function(t){return i(e,s=t),Object.defineProperty(e.prototype,"fixesPWidth",{get:function(){return!1},enumerable:!0,configurable:!0}),e.prototype.stretchChildren=function(){var e,t,r,n,i,o,a=[];try{for(var s=S(this.childNodes),c=s.next();!c.done;c=s.next())(x=c.value).canStretch(1)&&a.push(x)}catch(t){e={error:t}}finally{try{c&&!c.done&&(t=s.return)&&t.call(s)}finally{if(e)throw e.error}}var l=a.length,u=this.childNodes.length;if(l&&1 mjx-box":{"border-top":".07em solid"},"mjx-sqrt.mjx-tall > mjx-box":{"padding-left":".3em","margin-left":"-.3em"}},u);function u(){return null!==o&&o.apply(this,arguments)||this}e.CHTMLmsqrt=l},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),C=this&&this.__values||function(t){var e="function"==typeof Symbol&&Symbol.iterator,r=e&&t[e],n=0;if(r)return r.call(t);if(t&&"number"==typeof t.length)return{next:function(){return 
t&&n>=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0}),e.CommonMtrMixin=function(t){return i(e,r=t),Object.defineProperty(e.prototype,"fixesPWidth",{get:function(){return!1},enumerable:!0,configurable:!0}),Object.defineProperty(e.prototype,"numCells",{get:function(){return this.childNodes.length},enumerable:!0,configurable:!0}),Object.defineProperty(e.prototype,"labeled",{get:function(){return!1},enumerable:!0,configurable:!0}),Object.defineProperty(e.prototype,"tableCells",{get:function(){return this.childNodes},enumerable:!0,configurable:!0}),e.prototype.getChild=function(t){return this.childNodes[t]},e.prototype.getChildBBoxes=function(){return this.childNodes.map(function(t){return t.getBBox()})},e.prototype.stretchChildren=function(t){var e,r,n,i,o,a;void 0===t&&(t=null);var s=[],c=this.labeled?this.childNodes.slice(1):this.childNodes;try{for(var l=C(c),u=l.next();!u.done;u=l.next())(E=u.value.childNodes[0]).canStretch(1)&&s.push(E)}catch(t){e={error:t}}finally{try{u&&!u.done&&(r=l.return)&&r.call(l)}finally{if(e)throw e.error}}var h=s.length,f=this.childNodes.length;if(h&&1=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var o,a=r(80),s=(o=a.AbstractDOMAdaptor,i(l,o),l.prototype.parse=function(t,e){return void 0===e&&(e="text/html"),this.parser.parseFromString(t,e)},l.prototype.create=function(t,e){return e?this.document.createElementNS(e,t):this.document.createElement(t)},l.prototype.text=function(t){return this.document.createTextNode(t)},l.prototype.head=function(t){return t.head},l.prototype.body=function(t){return t.body},l.prototype.root=function(t){return t.documentElement},l.prototype.tags=function(t,e,r){void 0===r&&(r=null);var n=r?t.getElementsByTagNameNS(r,e):t.getElementsByTagName(e);return Array.from(n)},l.prototype.getElements=function(t,e){var r,n,i=[];try{for(var o=c(t),a=o.next();!a.done;a=o.next()){var s=a.value;"string"==typeof s?i=i.concat(Array.from(this.document.querySelectorAll(s))):Array.isArray(s)?i=i.concat(Array.from(s)):s instanceof this.window.NodeList||s instanceof this.window.HTMLCollection?i=i.concat(Array.from(s)):i.push(s)}}catch(t){r={error:t}}finally{try{a&&!a.done&&(n=o.return)&&n.call(o)}finally{if(r)throw r.error}}return i},l.prototype.parent=function(t){return t.parentNode},l.prototype.append=function(t,e){return t.appendChild(e)},l.prototype.insert=function(t,e){return this.parent(e).insertBefore(t,e)},l.prototype.remove=function(t){return this.parent(t).removeChild(t)},l.prototype.replace=function(t,e){return this.parent(e).replaceChild(t,e)},l.prototype.clone=function(t){return t.cloneNode(!0)},l.prototype.split=function(t,e){return t.splitText(e)},l.prototype.next=function(t){return t.nextSibling},l.prototype.previous=function(t){return t.previousSibling},l.prototype.firstChild=function(t){return t.firstChild},l.prototype.lastChild=function(t){return t.lastChild},l.prototype.childNodes=function(t){return Array.from(t.childNodes)},l.prototype.childNode=function(t,e){return t.childNodes[e]},l.prototype.kind=function(t){return t.nodeName.toLowerCase()},l.prototype.value=function(t){return t.nodeValue||""},l.prototype.textContent=function(t){return t.textContent},l.prototype.innerHTML=function(t){return t.innerHTML},l.prototype.outerHTML=function(t){return 
t.outerHTML},l.prototype.setAttribute=function(t,e,r,n){return void 0===n&&(n=null),n?t.setAttributeNS(n,e,r):t.setAttribute(e,r)},l.prototype.getAttribute=function(t,e){return t.getAttribute(e)},l.prototype.removeAttribute=function(t,e){return t.removeAttribute(e)},l.prototype.hasAttribute=function(t,e){return t.hasAttribute(e)},l.prototype.allAttributes=function(t){return Array.from(t.attributes).map(function(t){return{name:t.name,value:t.value}})},l.prototype.addClass=function(t,e){t.classList.add(e)},l.prototype.removeClass=function(t,e){return t.classList.remove(e)},l.prototype.hasClass=function(t,e){return t.classList.contains(e)},l.prototype.setStyle=function(t,e,r){t.style[e]=r},l.prototype.getStyle=function(t,e){return t.style[e]},l.prototype.allStyles=function(t){return t.style.cssText},l.prototype.fontSize=function(t){var e=this.window.getComputedStyle(t);return parseFloat(e.fontSize)},l.prototype.nodeSize=function(t,e,r){if(void 0===e&&(e=1),void 0===r&&(r=!1),r&&t.getBBox){var n=t.getBBox();return[n.width/e,n.height/e]}return[t.offsetWidth/e,t.offsetHeight/e]},l.prototype.nodeBBox=function(t){var e=t.getBoundingClientRect();return{left:e.left,right:e.right,top:e.top,bottom:e.bottom}},l);function l(t){var e=o.call(this,t.document)||this;return e.window=t,e.parser=new t.DOMParser,e}e.HTMLAdaptor=s},function(t,e,r){"use strict";var m=this&&this.__values||function(t){var e="function"==typeof Symbol&&Symbol.iterator,r=e&&t[e],n=0;if(r)return r.call(t);if(t&&"number"==typeof t.length)return{next:function(){return t&&n>=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var n=(i.prototype.node=function(t,e,r,n){var i,o;void 0===e&&(e={}),void 0===r&&(r=[]);var a=this.create(t,n);this.setAttributes(a,e);try{for(var s=m(r),c=s.next();!c.done;c=s.next()){var l=c.value;this.append(a,l)}}catch(t){i={error:t}}finally{try{c&&!c.done&&(o=s.return)&&o.call(s)}finally{if(i)throw i.error}}return a},i.prototype.setAttributes=function(t,e){var r,n,i,o,a,s;if(e.style&&"string"!=typeof e.style)try{for(var c=m(Object.keys(e.style)),l=c.next();!l.done;l=c.next()){var u=l.value;this.setStyle(t,u.replace(/-([a-z])/g,function(t,e){return e.toUpperCase()}),e.style[u])}}catch(t){r={error:t}}finally{try{l&&!l.done&&(n=c.return)&&n.call(c)}finally{if(r)throw r.error}}if(e.properties)try{for(var h=m(Object.keys(e.properties)),f=h.next();!f.done;f=h.next())t[u=f.value]=e.properties[u]}catch(t){i={error:t}}finally{try{f&&!f.done&&(o=h.return)&&o.call(h)}finally{if(i)throw i.error}}try{for(var p=m(Object.keys(e)),d=p.next();!d.done;d=p.next())"style"===(u=d.value)&&"string"!=typeof e.style||"properties"===u||this.setAttribute(t,u,e[u])}catch(t){a={error:t}}finally{try{d&&!d.done&&(s=p.return)&&s.call(p)}finally{if(a)throw a.error}}},i.prototype.replace=function(t,e){return this.insert(t,e),this.remove(e),e},i.prototype.childNode=function(t,e){return this.childNodes(t)[e]},i.prototype.allClasses=function(t){var e=this.getAttribute(t,"class");return e?e.replace(/ +/g," ").replace(/^ /,"").replace(/ $/,"").split(/ /):[]},i);function i(t){void 0===t&&(t=null),this.document=t}e.AbstractDOMAdaptor=n},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(79);e.browserAdaptor=function(){return new n.HTMLAdaptor(window)}},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof 
Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)});Object.defineProperty(e,"__esModule",{value:!0});var o,a=r(41),s=(o=a.AbstractMathDocument,i(c,o),c);function c(){return null!==o&&o.apply(this,arguments)||this}var l=(Object.defineProperty(u.prototype,"name",{get:function(){return this.constructor.NAME},enumerable:!0,configurable:!0}),u.prototype.handlesDocument=function(t){return!1},u.prototype.create=function(t,e){return new this.documentClass(t,this.adaptor,e)},u.NAME="generic",u);function u(t,e){void 0===e&&(e=5),this.documentClass=s,this.adaptor=t,this.priority=e}e.AbstractHandler=l},function(t,e,r){"use strict";var l=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var h=Symbol(),c=function(t){void 0===t&&(t=null),this.next=null,this.prev=null,this.data=t};e.ListItem=c;var i=(f.prototype.toArray=function(){return Array.from(this)},f.prototype.isBefore=function(t,e){return t":e.MO.BIN5,".":[0,3,i.TEXCLASS.PUNCT,{separator:!0}],"/":e.MO.ORD11,"//":n(1,1),"/=":e.MO.BIN4,":":[1,2,i.TEXCLASS.REL,null],":=":e.MO.BIN4,";":[0,3,i.TEXCLASS.PUNCT,{linebreakstyle:"after",separator:!0}],"<":e.MO.REL,"<=":e.MO.BIN5,"<>":n(1,1),"=":e.MO.REL,"==":e.MO.BIN4,">":e.MO.REL,">=":e.MO.BIN5,"?":[1,1,i.TEXCLASS.CLOSE,null],"@":e.MO.ORD11,"\\":e.MO.ORD,"^":e.MO.ORD11,_:e.MO.ORD11,"|":[2,2,i.TEXCLASS.ORD,{fence:!0,stretchy:!0,symmetric:!0}],"||":[2,2,i.TEXCLASS.BIN,{fence:!0,stretchy:!0,symmetric:!0}],"|||":[2,2,i.TEXCLASS.ORD,{fence:!0,stretchy:!0,symmetric:!0}],"\xb1":e.MO.BIN4,"\xb7":e.MO.BIN4,"\xd7":e.MO.BIN4,"\xf7":e.MO.BIN4,"\u02b9":e.MO.ORD,"\u0300":e.MO.ACCENT,"\u0301":e.MO.ACCENT,"\u0303":e.MO.WIDEACCENT,"\u0304":e.MO.ACCENT,"\u0306":e.MO.ACCENT,"\u0307":e.MO.ACCENT,"\u0308":e.MO.ACCENT,"\u030c":e.MO.ACCENT,"\u0332":e.MO.WIDEACCENT,"\u0338":e.MO.REL4,"\u2015":[0,0,i.TEXCLASS.ORD,{stretchy:!0}],"\u2017":[0,0,i.TEXCLASS.ORD,{stretchy:!0}],"\u2020":e.MO.BIN3,"\u2021":e.MO.BIN3,"\u2022":e.MO.BIN4,"\u2026":e.MO.INNER,"\u2044":e.MO.TALLBIN,"\u2061":e.MO.ORD,"\u2062":e.MO.ORD,"\u2063":[0,0,i.TEXCLASS.ORD,{linebreakstyle:"after",separator:!0}],"\u2064":e.MO.ORD,"\u20d7":e.MO.ACCENT,"\u2111":e.MO.ORD,"\u2113":e.MO.ORD,"\u2118":e.MO.ORD,"\u211c":e.MO.ORD,"\u2190":e.MO.WIDEREL,"\u2191":e.MO.RELSTRETCH,"\u2192":e.MO.WIDEREL,"\u2193":e.MO.RELSTRETCH,"\u2194":e.MO.WIDEREL,"\u2195":e.MO.RELSTRETCH,"\u2196":e.MO.RELSTRETCH,"\u2197":e.MO.RELSTRETCH,"\u2198":e.MO.RELSTRETCH,"\u2199":e.MO.RELSTRETCH,"\u219a":e.MO.RELACCENT,"\u219b":e.MO.RELACCENT,"\u219c":e.MO.WIDEREL,"\u219d":e.MO.WIDEREL,"\u219e":e.MO.WIDEREL,"\u219f":e.MO.WIDEREL,"\u21a0":e.MO.WIDEREL,"\u21a1":e.MO.RELSTRETCH,"\u21a2":e.MO.WIDEREL,"\u21a3":e.MO.WIDEREL,"\u21a4":e.MO.WIDEREL,"\u21a5":e.MO.RELSTRETCH,"\u21a6":e.MO.WIDEREL,"\u21a7":e.MO.RELSTRETCH,"\u21a8":e.MO.RELSTRETCH,"\u21a9":e.MO.WIDEREL,"\u21aa":e.MO.WIDEREL,"\u21ab":e.MO.WIDEREL,"\u21ac":e.MO.WIDEREL,"\u21ad":e.MO.WIDEREL,"\u21ae":e.MO.RELACCENT,"\u21af":e.MO.RELSTRETCH,"\u21b0":e.MO.RELSTRETCH,"\u21b1":e.MO.RELSTRETCH,"\u21b2":e.MO.RELSTRETCH,"\u21b3":e.MO.RELSTRETCH,"\u21b4":e.MO.RELSTRETCH,"\u21b5":e.MO.RELSTRETCH,"\u21b6":e.MO.RELACCEN
T,"\u21b7":e.MO.RELACCENT,"\u21b8":e.MO.REL,"\u21b9":e.MO.WIDEREL,"\u21ba":e.MO.REL,"\u21bb":e.MO.REL,"\u21bc":e.MO.WIDEREL,"\u21bd":e.MO.WIDEREL,"\u21be":e.MO.RELSTRETCH,"\u21bf":e.MO.RELSTRETCH,"\u21c0":e.MO.WIDEREL,"\u21c1":e.MO.WIDEREL,"\u21c2":e.MO.RELSTRETCH,"\u21c3":e.MO.RELSTRETCH,"\u21c4":e.MO.WIDEREL,"\u21c5":e.MO.RELSTRETCH,"\u21c6":e.MO.WIDEREL,"\u21c7":e.MO.WIDEREL,"\u21c8":e.MO.RELSTRETCH,"\u21c9":e.MO.WIDEREL,"\u21ca":e.MO.RELSTRETCH,"\u21cb":e.MO.WIDEREL,"\u21cc":e.MO.WIDEREL,"\u21cd":e.MO.RELACCENT,"\u21ce":e.MO.RELACCENT,"\u21cf":e.MO.RELACCENT,"\u21d0":e.MO.WIDEREL,"\u21d1":e.MO.RELSTRETCH,"\u21d2":e.MO.WIDEREL,"\u21d3":e.MO.RELSTRETCH,"\u21d4":e.MO.WIDEREL,"\u21d5":e.MO.RELSTRETCH,"\u21d6":e.MO.RELSTRETCH,"\u21d7":e.MO.RELSTRETCH,"\u21d8":e.MO.RELSTRETCH,"\u21d9":e.MO.RELSTRETCH,"\u21da":e.MO.WIDEREL,"\u21db":e.MO.WIDEREL,"\u21dc":e.MO.WIDEREL,"\u21dd":e.MO.WIDEREL,"\u21de":e.MO.REL,"\u21df":e.MO.REL,"\u21e0":e.MO.WIDEREL,"\u21e1":e.MO.RELSTRETCH,"\u21e2":e.MO.WIDEREL,"\u21e3":e.MO.RELSTRETCH,"\u21e4":e.MO.WIDEREL,"\u21e5":e.MO.WIDEREL,"\u21e6":e.MO.WIDEREL,"\u21e7":e.MO.RELSTRETCH,"\u21e8":e.MO.WIDEREL,"\u21e9":e.MO.RELSTRETCH,"\u21ea":e.MO.RELSTRETCH,"\u21eb":e.MO.RELSTRETCH,"\u21ec":e.MO.RELSTRETCH,"\u21ed":e.MO.RELSTRETCH,"\u21ee":e.MO.RELSTRETCH,"\u21ef":e.MO.RELSTRETCH,"\u21f0":e.MO.WIDEREL,"\u21f1":e.MO.REL,"\u21f2":e.MO.REL,"\u21f3":e.MO.RELSTRETCH,"\u21f4":e.MO.RELACCENT,"\u21f5":e.MO.RELSTRETCH,"\u21f6":e.MO.WIDEREL,"\u21f7":e.MO.RELACCENT,"\u21f8":e.MO.RELACCENT,"\u21f9":e.MO.RELACCENT,"\u21fa":e.MO.RELACCENT,"\u21fb":e.MO.RELACCENT,"\u21fc":e.MO.RELACCENT,"\u21fd":e.MO.WIDEREL,"\u21fe":e.MO.WIDEREL,"\u21ff":e.MO.WIDEREL,"\u2201":n(1,2,i.TEXCLASS.ORD),"\u2205":e.MO.ORD,"\u2206":e.MO.BIN3,"\u2208":e.MO.REL,"\u2209":e.MO.REL,"\u220a":e.MO.REL,"\u220b":e.MO.REL,"\u220c":e.MO.REL,"\u220d":e.MO.REL,"\u220e":e.MO.BIN3,"\u2212":e.MO.BIN4,"\u2213":e.MO.BIN4,"\u2214":e.MO.BIN4,"\u2215":e.MO.TALLBIN,"\u2216":e.MO.BIN4,"\u2217":e.MO.BIN4,"\u2218":e.MO.BIN4,"\u2219":e.MO.BIN4,"\u221d":e.MO.REL,"\u221e":e.MO.ORD,"\u221f":e.MO.REL,"\u2223":e.MO.REL,"\u2224":e.MO.REL,"\u2225":e.MO.REL,"\u2226":e.MO.REL,"\u2227":e.MO.BIN4,"\u2228":e.MO.BIN4,"\u2229":e.MO.BIN4,"\u222a":e.MO.BIN4,"\u2234":e.MO.REL,"\u2235":e.MO.REL,"\u2236":e.MO.REL,"\u2237":e.MO.REL,"\u2238":e.MO.BIN4,"\u2239":e.MO.REL,"\u223a":e.MO.BIN4,"\u223b":e.MO.REL,"\u223c":e.MO.REL,"\u223d":e.MO.REL,"\u223d\u0331":e.MO.BIN3,"\u223e":e.MO.REL,"\u223f":e.MO.BIN3,"\u2240":e.MO.BIN4,"\u2241":e.MO.REL,"\u2242":e.MO.REL,"\u2242\u0338":e.MO.REL,"\u2243":e.MO.REL,"\u2244":e.MO.REL,"\u2245":e.MO.REL,"\u2246":e.MO.REL,"\u2247":e.MO.REL,"\u2248":e.MO.REL,"\u2249":e.MO.REL,"\u224a":e.MO.REL,"\u224b":e.MO.REL,"\u224c":e.MO.REL,"\u224d":e.MO.REL,"\u224e":e.MO.REL,"\u224e\u0338":e.MO.REL,"\u224f":e.MO.REL,"\u224f\u0338":e.MO.REL,"\u2250":e.MO.REL,"\u2251":e.MO.REL,"\u2252":e.MO.REL,"\u2253":e.MO.REL,"\u2254":e.MO.REL,"\u2255":e.MO.REL,"\u2256":e.MO.REL,"\u2257":e.MO.REL,"\u2258":e.MO.REL,"\u2259":e.MO.REL,"\u225a":e.MO.REL,"\u225c":e.MO.REL,"\u225d":e.MO.REL,"\u225e":e.MO.REL,"\u225f":e.MO.REL,"\u2260":e.MO.REL,"\u2261":e.MO.REL,"\u2262":e.MO.REL,"\u2263":e.MO.REL,"\u2264":e.MO.REL,"\u2265":e.MO.REL,"\u2266":e.MO.REL,"\u2266\u0338":e.MO.REL,"\u2267":e.MO.REL,"\u2268":e.MO.REL,"\u2269":e.MO.REL,"\u226a":e.MO.REL,"\u226a\u0338":e.MO.REL,"\u226b":e.MO.REL,"\u226b\u0338":e.MO.REL,"\u226c":e.MO.REL,"\u226d":e.MO.REL,"\u226e":e.MO.REL,"\u226f":e.MO.REL,"\u2270":e.MO.REL,"\u2271":e.MO.REL,"\u2272":e.MO.REL,"\u2273":e.MO.REL,"\u2274":e.MO.RE
L,"\u2275":e.MO.REL,"\u2276":e.MO.REL,"\u2277":e.MO.REL,"\u2278":e.MO.REL,"\u2279":e.MO.REL,"\u227a":e.MO.REL,"\u227b":e.MO.REL,"\u227c":e.MO.REL,"\u227d":e.MO.REL,"\u227e":e.MO.REL,"\u227f":e.MO.REL,"\u227f\u0338":e.MO.REL,"\u2280":e.MO.REL,"\u2281":e.MO.REL,"\u2282":e.MO.REL,"\u2282\u20d2":e.MO.REL,"\u2283":e.MO.REL,"\u2283\u20d2":e.MO.REL,"\u2284":e.MO.REL,"\u2285":e.MO.REL,"\u2286":e.MO.REL,"\u2287":e.MO.REL,"\u2288":e.MO.REL,"\u2289":e.MO.REL,"\u228a":e.MO.REL,"\u228b":e.MO.REL,"\u228c":e.MO.BIN4,"\u228d":e.MO.BIN4,"\u228e":e.MO.BIN4,"\u228f":e.MO.REL,"\u228f\u0338":e.MO.REL,"\u2290":e.MO.REL,"\u2290\u0338":e.MO.REL,"\u2291":e.MO.REL,"\u2292":e.MO.REL,"\u2293":e.MO.BIN4,"\u2294":e.MO.BIN4,"\u2295":e.MO.BIN4,"\u2296":e.MO.BIN4,"\u2297":e.MO.BIN4,"\u2298":e.MO.BIN4,"\u2299":e.MO.BIN4,"\u229a":e.MO.BIN4,"\u229b":e.MO.BIN4,"\u229c":e.MO.BIN4,"\u229d":e.MO.BIN4,"\u229e":e.MO.BIN4,"\u229f":e.MO.BIN4,"\u22a0":e.MO.BIN4,"\u22a1":e.MO.BIN4,"\u22a2":e.MO.REL,"\u22a3":e.MO.REL,"\u22a4":e.MO.ORD55,"\u22a5":e.MO.REL,"\u22a6":e.MO.REL,"\u22a7":e.MO.REL,"\u22a8":e.MO.REL,"\u22a9":e.MO.REL,"\u22aa":e.MO.REL,"\u22ab":e.MO.REL,"\u22ac":e.MO.REL,"\u22ad":e.MO.REL,"\u22ae":e.MO.REL,"\u22af":e.MO.REL,"\u22b0":e.MO.REL,"\u22b1":e.MO.REL,"\u22b2":e.MO.REL,"\u22b3":e.MO.REL,"\u22b4":e.MO.REL,"\u22b5":e.MO.REL,"\u22b6":e.MO.REL,"\u22b7":e.MO.REL,"\u22b8":e.MO.REL,"\u22b9":e.MO.REL,"\u22ba":e.MO.BIN4,"\u22bb":e.MO.BIN4,"\u22bc":e.MO.BIN4,"\u22bd":e.MO.BIN4,"\u22be":e.MO.BIN3,"\u22bf":e.MO.BIN3,"\u22c4":e.MO.BIN4,"\u22c5":e.MO.BIN4,"\u22c6":e.MO.BIN4,"\u22c7":e.MO.BIN4,"\u22c8":e.MO.REL,"\u22c9":e.MO.BIN4,"\u22ca":e.MO.BIN4,"\u22cb":e.MO.BIN4,"\u22cc":e.MO.BIN4,"\u22cd":e.MO.REL,"\u22ce":e.MO.BIN4,"\u22cf":e.MO.BIN4,"\u22d0":e.MO.REL,"\u22d1":e.MO.REL,"\u22d2":e.MO.BIN4,"\u22d3":e.MO.BIN4,"\u22d4":e.MO.REL,"\u22d5":e.MO.REL,"\u22d6":e.MO.REL,"\u22d7":e.MO.REL,"\u22d8":e.MO.REL,"\u22d9":e.MO.REL,"\u22da":e.MO.REL,"\u22db":e.MO.REL,"\u22dc":e.MO.REL,"\u22dd":e.MO.REL,"\u22de":e.MO.REL,"\u22df":e.MO.REL,"\u22e0":e.MO.REL,"\u22e1":e.MO.REL,"\u22e2":e.MO.REL,"\u22e3":e.MO.REL,"\u22e4":e.MO.REL,"\u22e5":e.MO.REL,"\u22e6":e.MO.REL,"\u22e7":e.MO.REL,"\u22e8":e.MO.REL,"\u22e9":e.MO.REL,"\u22ea":e.MO.REL,"\u22eb":e.MO.REL,"\u22ec":e.MO.REL,"\u22ed":e.MO.REL,"\u22ee":e.MO.ORD55,"\u22ef":e.MO.INNER,"\u22f0":e.MO.REL,"\u22f1":[5,5,i.TEXCLASS.INNER,null],"\u22f2":e.MO.REL,"\u22f3":e.MO.REL,"\u22f4":e.MO.REL,"\u22f5":e.MO.REL,"\u22f6":e.MO.REL,"\u22f7":e.MO.REL,"\u22f8":e.MO.REL,"\u22f9":e.MO.REL,"\u22fa":e.MO.REL,"\u22fb":e.MO.REL,"\u22fc":e.MO.REL,"\u22fd":e.MO.REL,"\u22fe":e.MO.REL,"\u22ff":e.MO.REL,"\u2305":e.MO.BIN3,"\u2306":e.MO.BIN3,"\u2322":e.MO.REL4,"\u2323":e.MO.REL4,"\u2329":e.MO.OPEN,"\u232a":e.MO.CLOSE,"\u23aa":e.MO.ORD,"\u23af":[0,0,i.TEXCLASS.ORD,{stretchy:!0}],"\u23b0":e.MO.OPEN,"\u23b1":e.MO.CLOSE,"\u2500":e.MO.ORD,"\u25b3":e.MO.BIN4,"\u25b5":e.MO.BIN4,"\u25b9":e.MO.BIN4,"\u25bd":e.MO.BIN4,"\u25bf":e.MO.BIN4,"\u25c3":e.MO.BIN4,"\u25ef":e.MO.BIN3,"\u2660":e.MO.ORD,"\u2661":e.MO.ORD,"\u2662":e.MO.ORD,"\u2663":e.MO.ORD,"\u2758":e.MO.REL,"\u27f0":e.MO.RELSTRETCH,"\u27f1":e.MO.RELSTRETCH,"\u27f5":e.MO.WIDEREL,"\u27f6":e.MO.WIDEREL,"\u27f7":e.MO.WIDEREL,"\u27f8":e.MO.WIDEREL,"\u27f9":e.MO.WIDEREL,"\u27fa":e.MO.WIDEREL,"\u27fb":e.MO.WIDEREL,"\u27fc":e.MO.WIDEREL,"\u27fd":e.MO.WIDEREL,"\u27fe":e.MO.WIDEREL,"\u27ff":e.MO.WIDEREL,"\u2900":e.MO.RELACCENT,"\u2901":e.MO.RELACCENT,"\u2902":e.MO.RELACCENT,"\u2903":e.MO.RELACCENT,"\u2904":e.MO.RELACCENT,"\u2905":e.MO.RELACCENT,"\u2906":e.MO.RELACCENT,"\u2907":e.MO.RELACCENT,"
\u2908":e.MO.REL,"\u2909":e.MO.REL,"\u290a":e.MO.RELSTRETCH,"\u290b":e.MO.RELSTRETCH,"\u290c":e.MO.WIDEREL,"\u290d":e.MO.WIDEREL,"\u290e":e.MO.WIDEREL,"\u290f":e.MO.WIDEREL,"\u2910":e.MO.WIDEREL,"\u2911":e.MO.RELACCENT,"\u2912":e.MO.RELSTRETCH,"\u2913":e.MO.RELSTRETCH,"\u2914":e.MO.RELACCENT,"\u2915":e.MO.RELACCENT,"\u2916":e.MO.RELACCENT,"\u2917":e.MO.RELACCENT,"\u2918":e.MO.RELACCENT,"\u2919":e.MO.RELACCENT,"\u291a":e.MO.RELACCENT,"\u291b":e.MO.RELACCENT,"\u291c":e.MO.RELACCENT,"\u291d":e.MO.RELACCENT,"\u291e":e.MO.RELACCENT,"\u291f":e.MO.RELACCENT,"\u2920":e.MO.RELACCENT,"\u2921":e.MO.RELSTRETCH,"\u2922":e.MO.RELSTRETCH,"\u2923":e.MO.REL,"\u2924":e.MO.REL,"\u2925":e.MO.REL,"\u2926":e.MO.REL,"\u2927":e.MO.REL,"\u2928":e.MO.REL,"\u2929":e.MO.REL,"\u292a":e.MO.REL,"\u292b":e.MO.REL,"\u292c":e.MO.REL,"\u292d":e.MO.REL,"\u292e":e.MO.REL,"\u292f":e.MO.REL,"\u2930":e.MO.REL,"\u2931":e.MO.REL,"\u2932":e.MO.REL,"\u2933":e.MO.RELACCENT,"\u2934":e.MO.REL,"\u2935":e.MO.REL,"\u2936":e.MO.REL,"\u2937":e.MO.REL,"\u2938":e.MO.REL,"\u2939":e.MO.REL,"\u293a":e.MO.RELACCENT,"\u293b":e.MO.RELACCENT,"\u293c":e.MO.RELACCENT,"\u293d":e.MO.RELACCENT,"\u293e":e.MO.REL,"\u293f":e.MO.REL,"\u2940":e.MO.REL,"\u2941":e.MO.REL,"\u2942":e.MO.RELACCENT,"\u2943":e.MO.RELACCENT,"\u2944":e.MO.RELACCENT,"\u2945":e.MO.RELACCENT,"\u2946":e.MO.RELACCENT,"\u2947":e.MO.RELACCENT,"\u2948":e.MO.RELACCENT,"\u2949":e.MO.REL,"\u294a":e.MO.RELACCENT,"\u294b":e.MO.RELACCENT,"\u294c":e.MO.REL,"\u294d":e.MO.REL,"\u294e":e.MO.WIDEREL,"\u294f":e.MO.RELSTRETCH,"\u2950":e.MO.WIDEREL,"\u2951":e.MO.RELSTRETCH,"\u2952":e.MO.WIDEREL,"\u2953":e.MO.WIDEREL,"\u2954":e.MO.RELSTRETCH,"\u2955":e.MO.RELSTRETCH,"\u2956":e.MO.RELSTRETCH,"\u2957":e.MO.RELSTRETCH,"\u2958":e.MO.RELSTRETCH,"\u2959":e.MO.RELSTRETCH,"\u295a":e.MO.WIDEREL,"\u295b":e.MO.WIDEREL,"\u295c":e.MO.RELSTRETCH,"\u295d":e.MO.RELSTRETCH,"\u295e":e.MO.WIDEREL,"\u295f":e.MO.WIDEREL,"\u2960":e.MO.RELSTRETCH,"\u2961":e.MO.RELSTRETCH,"\u2962":e.MO.RELACCENT,"\u2963":e.MO.REL,"\u2964":e.MO.RELACCENT,"\u2965":e.MO.REL,"\u2966":e.MO.RELACCENT,"\u2967":e.MO.RELACCENT,"\u2968":e.MO.RELACCENT,"\u2969":e.MO.RELACCENT,"\u296a":e.MO.RELACCENT,"\u296b":e.MO.RELACCENT,"\u296c":e.MO.RELACCENT,"\u296d":e.MO.RELACCENT,"\u296e":e.MO.RELSTRETCH,"\u296f":e.MO.RELSTRETCH,"\u2970":e.MO.RELACCENT,"\u2971":e.MO.RELACCENT,"\u2972":e.MO.RELACCENT,"\u2973":e.MO.RELACCENT,"\u2974":e.MO.RELACCENT,"\u2975":e.MO.RELACCENT,"\u2976":e.MO.RELACCENT,"\u2977":e.MO.RELACCENT,"\u2978":e.MO.RELACCENT,"\u2979":e.MO.RELACCENT,"\u297a":e.MO.RELACCENT,"\u297b":e.MO.RELACCENT,"\u297c":e.MO.RELACCENT,"\u297d":e.MO.RELACCENT,"\u297e":e.MO.REL,"\u297f":e.MO.REL,"\u2981":e.MO.BIN3,"\u2982":e.MO.BIN3,"\u2999":e.MO.BIN3,"\u299a":e.MO.BIN3,"\u299b":e.MO.BIN3,"\u299c":e.MO.BIN3,"\u299d":e.MO.BIN3,"\u299e":e.MO.BIN3,"\u299f":e.MO.BIN3,"\u29a0":e.MO.BIN3,"\u29a1":e.MO.BIN3,"\u29a2":e.MO.BIN3,"\u29a3":e.MO.BIN3,"\u29a4":e.MO.BIN3,"\u29a5":e.MO.BIN3,"\u29a6":e.MO.BIN3,"\u29a7":e.MO.BIN3,"\u29a8":e.MO.BIN3,"\u29a9":e.MO.BIN3,"\u29aa":e.MO.BIN3,"\u29ab":e.MO.BIN3,"\u29ac":e.MO.BIN3,"\u29ad":e.MO.BIN3,"\u29ae":e.MO.BIN3,"\u29af":e.MO.BIN3,"\u29b0":e.MO.BIN3,"\u29b1":e.MO.BIN3,"\u29b2":e.MO.BIN3,"\u29b3":e.MO.BIN3,"\u29b4":e.MO.BIN3,"\u29b5":e.MO.BIN3,"\u29b6":e.MO.BIN4,"\u29b7":e.MO.BIN4,"\u29b8":e.MO.BIN4,"\u29b9":e.MO.BIN4,"\u29ba":e.MO.BIN4,"\u29bb":e.MO.BIN4,"\u29bc":e.MO.BIN4,"\u29bd":e.MO.BIN4,"\u29be":e.MO.BIN4,"\u29bf":e.MO.BIN4,"\u29c0":e.MO.REL,"\u29c1":e.MO.REL,"\u29c2":e.MO.BIN3,"\u29c3":e.MO.BIN3,"\u29c4":e.MO.BIN4,"\u29c5":e.MO.BIN4,"
\u29c6":e.MO.BIN4,"\u29c7":e.MO.BIN4,"\u29c8":e.MO.BIN4,"\u29c9":e.MO.BIN3,"\u29ca":e.MO.BIN3,"\u29cb":e.MO.BIN3,"\u29cc":e.MO.BIN3,"\u29cd":e.MO.BIN3,"\u29ce":e.MO.REL,"\u29cf":e.MO.REL,"\u29cf\u0338":e.MO.REL,"\u29d0":e.MO.REL,"\u29d0\u0338":e.MO.REL,"\u29d1":e.MO.REL,"\u29d2":e.MO.REL,"\u29d3":e.MO.REL,"\u29d4":e.MO.REL,"\u29d5":e.MO.REL,"\u29d6":e.MO.BIN4,"\u29d7":e.MO.BIN4,"\u29d8":e.MO.BIN3,"\u29d9":e.MO.BIN3,"\u29db":e.MO.BIN3,"\u29dc":e.MO.BIN3,"\u29dd":e.MO.BIN3,"\u29de":e.MO.REL,"\u29df":e.MO.BIN3,"\u29e0":e.MO.BIN3,"\u29e1":e.MO.REL,"\u29e2":e.MO.BIN4,"\u29e3":e.MO.REL,"\u29e4":e.MO.REL,"\u29e5":e.MO.REL,"\u29e6":e.MO.REL,"\u29e7":e.MO.BIN3,"\u29e8":e.MO.BIN3,"\u29e9":e.MO.BIN3,"\u29ea":e.MO.BIN3,"\u29eb":e.MO.BIN3,"\u29ec":e.MO.BIN3,"\u29ed":e.MO.BIN3,"\u29ee":e.MO.BIN3,"\u29ef":e.MO.BIN3,"\u29f0":e.MO.BIN3,"\u29f1":e.MO.BIN3,"\u29f2":e.MO.BIN3,"\u29f3":e.MO.BIN3,"\u29f4":e.MO.REL,"\u29f5":e.MO.BIN4,"\u29f6":e.MO.BIN4,"\u29f7":e.MO.BIN4,"\u29f8":e.MO.BIN3,"\u29f9":e.MO.BIN3,"\u29fa":e.MO.BIN3,"\u29fb":e.MO.BIN3,"\u29fe":e.MO.BIN4,"\u29ff":e.MO.BIN4,"\u2a1d":e.MO.BIN3,"\u2a1e":e.MO.BIN3,"\u2a1f":e.MO.BIN3,"\u2a20":e.MO.BIN3,"\u2a21":e.MO.BIN3,"\u2a22":e.MO.BIN4,"\u2a23":e.MO.BIN4,"\u2a24":e.MO.BIN4,"\u2a25":e.MO.BIN4,"\u2a26":e.MO.BIN4,"\u2a27":e.MO.BIN4,"\u2a28":e.MO.BIN4,"\u2a29":e.MO.BIN4,"\u2a2a":e.MO.BIN4,"\u2a2b":e.MO.BIN4,"\u2a2c":e.MO.BIN4,"\u2a2d":e.MO.BIN4,"\u2a2e":e.MO.BIN4,"\u2a2f":e.MO.BIN4,"\u2a30":e.MO.BIN4,"\u2a31":e.MO.BIN4,"\u2a32":e.MO.BIN4,"\u2a33":e.MO.BIN4,"\u2a34":e.MO.BIN4,"\u2a35":e.MO.BIN4,"\u2a36":e.MO.BIN4,"\u2a37":e.MO.BIN4,"\u2a38":e.MO.BIN4,"\u2a39":e.MO.BIN4,"\u2a3a":e.MO.BIN4,"\u2a3b":e.MO.BIN4,"\u2a3c":e.MO.BIN4,"\u2a3d":e.MO.BIN4,"\u2a3e":e.MO.BIN4,"\u2a3f":e.MO.BIN4,"\u2a40":e.MO.BIN4,"\u2a41":e.MO.BIN4,"\u2a42":e.MO.BIN4,"\u2a43":e.MO.BIN4,"\u2a44":e.MO.BIN4,"\u2a45":e.MO.BIN4,"\u2a46":e.MO.BIN4,"\u2a47":e.MO.BIN4,"\u2a48":e.MO.BIN4,"\u2a49":e.MO.BIN4,"\u2a4a":e.MO.BIN4,"\u2a4b":e.MO.BIN4,"\u2a4c":e.MO.BIN4,"\u2a4d":e.MO.BIN4,"\u2a4e":e.MO.BIN4,"\u2a4f":e.MO.BIN4,"\u2a50":e.MO.BIN4,"\u2a51":e.MO.BIN4,"\u2a52":e.MO.BIN4,"\u2a53":e.MO.BIN4,"\u2a54":e.MO.BIN4,"\u2a55":e.MO.BIN4,"\u2a56":e.MO.BIN4,"\u2a57":e.MO.BIN4,"\u2a58":e.MO.BIN4,"\u2a59":e.MO.REL,"\u2a5a":e.MO.BIN4,"\u2a5b":e.MO.BIN4,"\u2a5c":e.MO.BIN4,"\u2a5d":e.MO.BIN4,"\u2a5e":e.MO.BIN4,"\u2a5f":e.MO.BIN4,"\u2a60":e.MO.BIN4,"\u2a61":e.MO.BIN4,"\u2a62":e.MO.BIN4,"\u2a63":e.MO.BIN4,"\u2a64":e.MO.BIN4,"\u2a65":e.MO.BIN4,"\u2a66":e.MO.REL,"\u2a67":e.MO.REL,"\u2a68":e.MO.REL,"\u2a69":e.MO.REL,"\u2a6a":e.MO.REL,"\u2a6b":e.MO.REL,"\u2a6c":e.MO.REL,"\u2a6d":e.MO.REL,"\u2a6e":e.MO.REL,"\u2a6f":e.MO.REL,"\u2a70":e.MO.REL,"\u2a71":e.MO.BIN4,"\u2a72":e.MO.BIN4,"\u2a73":e.MO.REL,"\u2a74":e.MO.REL,"\u2a75":e.MO.REL,"\u2a76":e.MO.REL,"\u2a77":e.MO.REL,"\u2a78":e.MO.REL,"\u2a79":e.MO.REL,"\u2a7a":e.MO.REL,"\u2a7b":e.MO.REL,"\u2a7c":e.MO.REL,"\u2a7d":e.MO.REL,"\u2a7d\u0338":e.MO.REL,"\u2a7e":e.MO.REL,"\u2a7e\u0338":e.MO.REL,"\u2a7f":e.MO.REL,"\u2a80":e.MO.REL,"\u2a81":e.MO.REL,"\u2a82":e.MO.REL,"\u2a83":e.MO.REL,"\u2a84":e.MO.REL,"\u2a85":e.MO.REL,"\u2a86":e.MO.REL,"\u2a87":e.MO.REL,"\u2a88":e.MO.REL,"\u2a89":e.MO.REL,"\u2a8a":e.MO.REL,"\u2a8b":e.MO.REL,"\u2a8c":e.MO.REL,"\u2a8d":e.MO.REL,"\u2a8e":e.MO.REL,"\u2a8f":e.MO.REL,"\u2a90":e.MO.REL,"\u2a91":e.MO.REL,"\u2a92":e.MO.REL,"\u2a93":e.MO.REL,"\u2a94":e.MO.REL,"\u2a95":e.MO.REL,"\u2a96":e.MO.REL,"\u2a97":e.MO.REL,"\u2a98":e.MO.REL,"\u2a99":e.MO.REL,"\u2a9a":e.MO.REL,"\u2a9b":e.MO.REL,"\u2a9c":e.MO.REL,"\u2a9d":e.MO.REL,"\u2a9e":e.MO.REL,"\u2a9f":e.MO.RE
L,"\u2aa0":e.MO.REL,"\u2aa1":e.MO.REL,"\u2aa1\u0338":e.MO.REL,"\u2aa2":e.MO.REL,"\u2aa2\u0338":e.MO.REL,"\u2aa3":e.MO.REL,"\u2aa4":e.MO.REL,"\u2aa5":e.MO.REL,"\u2aa6":e.MO.REL,"\u2aa7":e.MO.REL,"\u2aa8":e.MO.REL,"\u2aa9":e.MO.REL,"\u2aaa":e.MO.REL,"\u2aab":e.MO.REL,"\u2aac":e.MO.REL,"\u2aad":e.MO.REL,"\u2aae":e.MO.REL,"\u2aaf":e.MO.REL,"\u2aaf\u0338":e.MO.REL,"\u2ab0":e.MO.REL,"\u2ab0\u0338":e.MO.REL,"\u2ab1":e.MO.REL,"\u2ab2":e.MO.REL,"\u2ab3":e.MO.REL,"\u2ab4":e.MO.REL,"\u2ab5":e.MO.REL,"\u2ab6":e.MO.REL,"\u2ab7":e.MO.REL,"\u2ab8":e.MO.REL,"\u2ab9":e.MO.REL,"\u2aba":e.MO.REL,"\u2abb":e.MO.REL,"\u2abc":e.MO.REL,"\u2abd":e.MO.REL,"\u2abe":e.MO.REL,"\u2abf":e.MO.REL,"\u2ac0":e.MO.REL,"\u2ac1":e.MO.REL,"\u2ac2":e.MO.REL,"\u2ac3":e.MO.REL,"\u2ac4":e.MO.REL,"\u2ac5":e.MO.REL,"\u2ac6":e.MO.REL,"\u2ac7":e.MO.REL,"\u2ac8":e.MO.REL,"\u2ac9":e.MO.REL,"\u2aca":e.MO.REL,"\u2acb":e.MO.REL,"\u2acc":e.MO.REL,"\u2acd":e.MO.REL,"\u2ace":e.MO.REL,"\u2acf":e.MO.REL,"\u2ad0":e.MO.REL,"\u2ad1":e.MO.REL,"\u2ad2":e.MO.REL,"\u2ad3":e.MO.REL,"\u2ad4":e.MO.REL,"\u2ad5":e.MO.REL,"\u2ad6":e.MO.REL,"\u2ad7":e.MO.REL,"\u2ad8":e.MO.REL,"\u2ad9":e.MO.REL,"\u2ada":e.MO.REL,"\u2adb":e.MO.REL,"\u2adc":e.MO.REL,"\u2add":e.MO.REL,"\u2ade":e.MO.REL,"\u2adf":e.MO.REL,"\u2ae0":e.MO.REL,"\u2ae1":e.MO.REL,"\u2ae2":e.MO.REL,"\u2ae3":e.MO.REL,"\u2ae4":e.MO.REL,"\u2ae5":e.MO.REL,"\u2ae6":e.MO.REL,"\u2ae7":e.MO.REL,"\u2ae8":e.MO.REL,"\u2ae9":e.MO.REL,"\u2aea":e.MO.REL,"\u2aeb":e.MO.REL,"\u2aec":e.MO.REL,"\u2aed":e.MO.REL,"\u2aee":e.MO.REL,"\u2aef":e.MO.REL,"\u2af0":e.MO.REL,"\u2af1":e.MO.REL,"\u2af2":e.MO.REL,"\u2af3":e.MO.REL,"\u2af4":e.MO.BIN4,"\u2af5":e.MO.BIN4,"\u2af6":e.MO.BIN4,"\u2af7":e.MO.REL,"\u2af8":e.MO.REL,"\u2af9":e.MO.REL,"\u2afa":e.MO.REL,"\u2afb":e.MO.BIN4,"\u2afd":e.MO.BIN4,"\u2afe":e.MO.BIN3,"\u2b45":e.MO.RELSTRETCH,"\u2b46":e.MO.RELSTRETCH,"\u3008":e.MO.OPEN,"\u3009":e.MO.CLOSE,"\ufe37":e.MO.WIDEACCENT,"\ufe38":e.MO.WIDEACCENT}},e.OPTABLE.infix["^"]=e.MO.WIDEREL,e.OPTABLE.infix._=e.MO.WIDEREL,e.OPTABLE.prefix["\u2223"]=e.MO.OPEN,e.OPTABLE.prefix["\u2225"]=e.MO.OPEN,e.OPTABLE.postfix["\u2223"]=e.MO.CLOSE,e.OPTABLE.postfix["\u2225"]=e.MO.CLOSE},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),o=this&&this.__assign||function(){return(o=Object.assign||function(t){for(var e,r=1,n=arguments.length;r=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},i=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var o,s=r(24),c=(o=s.PrioritizedList,i(l,o),l.prototype.register=function(t){return this.add(t,t.priority)},l.prototype.unregister=function(t){this.remove(t)},l.prototype.handlesDocument=function(t){var e,r;try{for(var n=a(this),i=n.next();!i.done;i=n.next()){var o=i.value.item;if(o.handlesDocument(t))return o}}catch(t){e={error:t}}finally{try{i&&!i.done&&(r=n.return)&&r.call(n)}finally{if(e)throw e.error}}throw new Error("Can't find handler for 
document")},l.prototype.document=function(t,e){return void 0===e&&(e=null),this.handlesDocument(t).create(t,e)},l);function l(){return null!==o&&o.apply(this,arguments)||this}e.HandlerList=c},function(t,e,r){"use strict";var c=this&&this.__values||function(t){var e="function"==typeof Symbol&&Symbol.iterator,r=e&&t[e],n=0;if(r)return r.call(t);if(t&&"number"==typeof t.length)return{next:function(){return t&&n>=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},n=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},s=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0\n",o=e;e+=" ";try{for(var a=l(t.childNodes),s=a.next();!s.done;s=a.next()){var c=s.value;i+=this.visitNode(c,e)}}catch(t){r={error:t}}finally{try{s&&!s.done&&(n=a.return)&&n.call(a)}finally{if(r)throw r.error}}return i+="\n"+o+""},h.prototype.visitAnnotationNode=function(t,e){return e+""+this.childNodeMml(t,"","")+""},h.prototype.visitDefault=function(t,e){var r=t.kind,n=s(t.isToken||0===t.childNodes.length?["",""]:["\n",e],2),i=n[0],o=n[1],a=this.childNodeMml(t,e+" ",i);return e+"<"+r+this.getAttributes(t)+">"+(a.match(/\S/)?i+a+o:"")+""},h.prototype.childNodeMml=function(t,e,r){var n,i,o="";try{for(var a=l(t.childNodes),s=a.next();!s.done;s=a.next()){var c=s.value;o+=this.visitNode(c,e)+r}}catch(t){n={error:t}}finally{try{s&&!s.done&&(i=a.return)&&i.call(a)}finally{if(n)throw n.error}}return o},h.prototype.getAttributes=function(t){var e,r,n="",i=t.attributes.getAllAttributes();try{for(var o=l(Object.keys(i)),a=o.next();!a.done;a=o.next()){var s=a.value;void 0!==i[s]&&(n+=" "+s+'="'+this.quoteHTML(i[s].toString())+'"')}}catch(t){e={error:t}}finally{try{a&&!a.done&&(r=o.return)&&r.call(o)}finally{if(e)throw e.error}}return n},h.prototype.quoteHTML=function(t){return t.replace(/&/g,"&").replace(//g,">").replace(/\"/g,""").replace(/([\uD800-\uDBFF].)/g,function(t,e){return"&#x"+(1024*(e.charCodeAt(0)-55296)+(e.charCodeAt(1)-56320)+65536).toString(16).toUpperCase()+";"}).replace(/([\u0080-\uD7FF\uE000-\uFFFF])/g,function(t,e){return"&#x"+e.charCodeAt(0).toString(16).toUpperCase()+";"})},h);function h(){return null!==o&&o.apply(this,arguments)||this}e.SerializedMmlVisitor=c},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=(Object.defineProperty(i.prototype,"kind",{get:function(){return this.node.kind},enumerable:!0,configurable:!0}),i.prototype.wrap=function(t){return this.factory.wrap(t)},i);function i(t,e){this.factory=t,this.node=e}e.AbstractWrapper=n},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),o=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not 
defined.")};Object.defineProperty(e,"__esModule",{value:!0});var s,a=r(41),w=r(3),c=r(100),l=r(101),u=r(102),h=r(13),f=(s=a.AbstractMathDocument,i(p,s),p.prototype.findPosition=function(t,e,r,n){var i,o;try{for(var a=T(n[t]),s=a.next();!s.done;s=a.next()){var c=s.value,l=_(c,2),u=l[0],h=l[1];if(e<=h)return{node:u,n:e,delim:r};e-=h}}catch(t){i={error:t}}finally{try{s&&!s.done&&(o=a.return)&&o.call(a)}finally{if(i)throw i.error}}return{node:null,n:0,delim:r}},p.prototype.mathItem=function(t,e,r){var n=t.math,i=this.findPosition(t.n,t.start.n,t.open,r),o=this.findPosition(t.n,t.end.n,t.close,r);return new this.options.MathItem(n,e,t.display,i,o)},p.prototype.findMath=function(t){var e,r,n,i,o,a,s,c,l;if(!this.processed.isSet("findMath")){this.adaptor.document=this.document,t=w.userOptions({elements:[this.adaptor.body(this.document)]},t);try{for(var u=T(this.adaptor.getElements(t.elements,this.document)),h=u.next();!h.done;h=u.next()){var f=h.value,p=_([null,null],2),d=p[0],m=p[1];try{for(var y=(n=void 0,T(this.inputJax)),v=y.next();!v.done;v=y.next()){var b=v.value,g=new this.options.MathList;if(b.processStrings){null===d&&(d=(o=_(this.domStrings.find(f),2))[0],m=o[1]);try{for(var M=(a=void 0,T(b.findMath(d))),O=M.next();!O.done;O=M.next()){var x=O.value;g.push(this.mathItem(x,b,m))}}catch(t){a={error:t}}finally{try{O&&!O.done&&(s=M.return)&&s.call(M)}finally{if(a)throw a.error}}}else try{for(var S=(c=void 0,T(b.findMath(f))),E=S.next();!E.done;E=S.next()){x=E.value;var C=new this.options.MathItem(x.math,b,x.display,x.start,x.end);g.push(C)}}catch(t){c={error:t}}finally{try{E&&!E.done&&(l=S.return)&&l.call(S)}finally{if(c)throw c.error}}this.math.merge(g)}}catch(t){n={error:t}}finally{try{v&&!v.done&&(i=y.return)&&i.call(y)}finally{if(n)throw n.error}}}}catch(t){e={error:t}}finally{try{h&&!h.done&&(r=u.return)&&r.call(u)}finally{if(e)throw e.error}}this.processed.set("findMath")}return this},p.prototype.updateDocument=function(){return this.processed.isSet("updateDocument")||(this.addPageElements(),this.addStyleSheet(),s.prototype.updateDocument.call(this),this.processed.set("updateDocument")),this},p.prototype.addPageElements=function(){var t=this.adaptor.body(this.document),e=this.documentPageElements();e&&this.adaptor.append(t,e)},p.prototype.addStyleSheet=function(){var t=this.documentStyleSheet();if(t){var e=this.adaptor.head(this.document),r=this.findSheet(e,this.adaptor.getAttribute(t,"id"));r?this.adaptor.replace(t,r):this.adaptor.append(e,t)}},p.prototype.findSheet=function(t,e){var r,n;if(e)try{for(var i=T(this.adaptor.tags(t,"style")),o=i.next();!o.done;o=i.next()){var a=o.value;if(this.adaptor.getAttribute(a,"id")===e)return a}}catch(t){r={error:t}}finally{try{o&&!o.done&&(n=i.return)&&n.call(i)}finally{if(r)throw r.error}}return null},p.prototype.removeFromDocument=function(t){var e,r;if(void 0===t&&(t=!1),this.processed.isSet("updateDocument"))try{for(var n=T(this.math),i=n.next();!i.done;i=n.next()){var o=i.value;o.state()>=h.STATE.INSERTED&&o.state(h.STATE.TYPESET,t)}}catch(t){e={error:t}}finally{try{i&&!i.done&&(r=n.return)&&r.call(n)}finally{if(e)throw e.error}}return this.processed.clear("updateDocument"),this},p.prototype.documentStyleSheet=function(){return this.outputJax.styleSheet(this)},p.prototype.documentPageElements=function(){return 
this.outputJax.pageElements(this)},p.KIND="HTML",p.OPTIONS=o(o({},a.AbstractMathDocument.OPTIONS),{renderActions:w.expandable(o(o({},a.AbstractMathDocument.OPTIONS.renderActions),{styles:[h.STATE.INSERTED+1,"","updateStyleSheet",!1]})),MathList:l.HTMLMathList,MathItem:c.HTMLMathItem,DomStrings:null}),p);function p(t,e,r){var n=this,i=_(w.separateOptions(r,u.HTMLDomStrings.OPTIONS),2),o=i[0],a=i[1];return(n=s.call(this,t,e,o)||this).domStrings=n.options.DomStrings||new u.HTMLDomStrings(a),n.domStrings.adaptor=e,n}e.HTMLDocument=f},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)});Object.defineProperty(e,"__esModule",{value:!0});var o,a=r(13),s=(o=a.AbstractMathItem,i(c,o),Object.defineProperty(c.prototype,"adaptor",{get:function(){return this.inputJax.adaptor},enumerable:!0,configurable:!0}),c.prototype.updateDocument=function(t){if(this.state()=a.STATE.TYPESET){var e=this.start.node,r=this.adaptor.text("");if(t){var n=this.start.delim+this.math+this.end.delim;if(this.inputJax.processStrings)r=this.adaptor.text(n);else{var i=this.adaptor.parse(n,"text/html");r=this.adaptor.firstChild(this.adaptor.body(i))}}this.adaptor.replace(r,e),this.start.node=this.end.node=r,this.start.n=this.end.n=0}},c);function c(t,e,r,n,i){return void 0===r&&(r=!0),void 0===n&&(n={node:null,n:0,delim:""}),void 0===i&&(i={node:null,n:0,delim:""}),o.call(this,t,e,r,n,i)||this}e.HTMLMathItem=s},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)});Object.defineProperty(e,"__esModule",{value:!0});var o,a=r(43),s=(o=a.AbstractMathList,i(c,o),c);function c(){return null!==o&&o.apply(this,arguments)||this}e.HTMLMathList=s},function(t,e,r){"use strict";var s=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var a=r(29),u=r(3),h=r(106),f=r(107),s=r(6),p=r(21),d=r(4),m=r(110),y=r(27),v=r(11);r(113);var b,g=(b=a.AbstractInputJax,i(M,b),M.configure=function(t){var e,r,n=v.Configuration.empty();try{for(var i=c(t),o=i.next();!o.done;o=i.next()){var a=o.value,s=v.ConfigurationHandler.get(a);s&&n.append(s)}}catch(t){e={error:t}}finally{try{o&&!o.done&&(r=i.return)&&r.call(i)}finally{if(e)throw e.error}}return n.init(n),n},M.tags=function(t,e){y.TagsFactory.addTags(e.tags),y.TagsFactory.setDefault(t.options.tags),t.tags=y.TagsFactory.getDefault(),t.tags.configuration=t},M.prototype.setMmlFactory=function(t){b.prototype.setMmlFactory.call(this,t),this._parseOptions.nodeFactory.setMmlFactory(t)},Object.defineProperty(M.prototype,"parseOptions",{get:function(){return this._parseOptions},enumerable:!0,configurable:!0}),M.prototype.compile=function(t,e){this.parseOptions.clear(),this.executeFilters(this.preFilters,t,e,this.parseOptions);var 
r,n=t.display;this.latex=t.math,this.parseOptions.tags.startEquation(t);try{r=new p.default(this.latex,{display:n,isInner:!1},this.parseOptions).mml()}catch(t){if(!(t instanceof d.default))throw t;this.parseOptions.error=!0,r=this.formatError(t)}return r=this.parseOptions.nodeFactory.create("node","math",[r]),n&&s.default.setAttribute(r,"display","block"),this.parseOptions.tags.finishEquation(t),this.parseOptions.root=r,this.executeFilters(this.postFilters,t,e,this.parseOptions),this.mathNode=this.parseOptions.root,this.mathNode},M.prototype.findMath=function(t){return this.findTeX.findMath(t)},M.prototype.formatError=function(t){var e=t.message.replace(/\n.*/,"");return this.parseOptions.nodeFactory.create("error",e,t.id,this.latex)},M.NAME="TeX",M.OPTIONS=o(o({},a.AbstractInputJax.OPTIONS),{FindTeX:null,packages:["base"],digits:/^(?:[0-9]+(?:\{,\}[0-9]{3})*(?:\.[0-9]*)?|\.[0-9]+)/,maxBuffer:5120}),M);function M(t){void 0===t&&(t={});var e=this,r=l(u.separateOptions(t,M.OPTIONS,h.FindTeX.OPTIONS),3),n=r[0],i=r[1],o=r[2];(e=b.call(this,i)||this).findTeX=e.options.FindTeX||new h.FindTeX(o);var a=e.options.packages,s=e.configuration=M.configure(a),c=e._parseOptions=new m.default(s,[e.options,y.TagsFactory.OPTIONS]);return u.userOptions(c.options,n),s.config(s,e),M.tags(c,s),e.postFilters.add(f.default.cleanSubSup,-5),e.postFilters.add(f.default.setInherited,-4),e.postFilters.add(f.default.cleanStretchy,-3),e.postFilters.add(f.default.cleanAttributes,-2),e.postFilters.add(f.default.combineRelations,-1),e}e.TeX=g},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),h=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var n,d=r(0),m=r(6);!function(t){t.cleanStretchy=function(t){var e,r,n=t.data;try{for(var i=p(n.getList("fixStretchy")),o=i.next();!o.done;o=i.next()){var a=o.value;if(m.default.getProperty(a,"fixStretchy")){var s=m.default.getForm(a);s&&s[3]&&s[3].stretchy&&m.default.setAttribute(a,"stretchy",!1);var c=a.parent;if(!(m.default.getTexClass(a)||s&&s[2])){var l=n.nodeFactory.create("node","TeXAtom",[a]);c.replaceChild(l,a),l.inheritAttributesFrom(a)}m.default.removeProperties(a,"fixStretchy")}}}catch(t){e={error:t}}finally{try{o&&!o.done&&(r=i.return)&&r.call(i)}finally{if(e)throw e.error}}},t.cleanAttributes=function(t){t.data.root.walkTree(function(t,e){var r,n,i=t.attributes;try{for(var o=p(i.getExplicitNames()),a=o.next();!a.done;a=o.next()){var s=a.value;i.attributes[s]===t.attributes.getInherited(s)&&delete i.attributes[s]}}catch(t){r={error:t}}finally{try{a&&!a.done&&(n=o.return)&&n.call(o)}finally{if(r)throw r.error}}},{})},t.combineRelations=function(t){var e,r;try{for(var n=p(t.data.getList("mo")),i=n.next();!i.done;i=n.next()){var o=i.value;if(!o.getProperty("relationsCombined")&&o.parent&&(!o.parent||m.default.isType(o.parent,"mrow"))&&m.default.getTexClass(o)===d.TEXCLASS.REL){for(var a=o.parent,s=void 
0,c=a.childNodes,l=c.indexOf(o)+1,u=m.default.getProperty(o,"variantForm");l\u20d2",nvinfin:"\u29de",nvlArr:"\u2902",nvle:"\u2264\u20d2",nvlt:"<\u20d2",nvltrie:"\u22b4\u20d2",nvrArr:"\u2903",nvrtrie:"\u22b5\u20d2",nvsim:"\u223c\u20d2",nwArr:"\u21d6",nwarhk:"\u2923",nwarrow:"\u2196",nwnear:"\u2927"},"n")},function(t,e,r){"use strict";var u=this&&this.__values||function(t){var e="function"==typeof Symbol&&Symbol.iterator,r=e&&t[e],n=0;if(r)return r.call(t);if(t&&"number"==typeof t.length)return{next:function(){return t&&n>=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},h=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var o=r(111),a=r(8),s=r(112),l=r(3),u=(h.prototype.pushParser=function(t){this.parsers.unshift(t)},h.prototype.popParser=function(){this.parsers.shift()},Object.defineProperty(h.prototype,"parser",{get:function(){return this.parsers[0]},enumerable:!0,configurable:!0}),h.prototype.clear=function(){this.parsers=[],this.root=null,this.nodeLists={},this.error=!1,this.tags.resetTag()},h.prototype.addNode=function(t,e){var r=this.nodeLists[t];(r=r||(this.nodeLists[t]=[])).push(e)},h.prototype.getList=function(t){var e,r,n=this.nodeLists[t]||[],i=[];try{for(var o=c(n),a=o.next();!a.done;a=o.next()){var s=a.value;this.inTree(s)&&i.push(s)}}catch(t){e={error:t}}finally{try{a&&!a.done&&(r=o.return)&&r.call(o)}finally{if(e)throw e.error}}return this.nodeLists[t]=i},h.prototype.inTree=function(t){for(;t&&t!==this.root;)t=t.parent;return!!t},h);function h(t,e){void 0===e&&(e=[]),this.options={},this.parsers=[],this.root=null,this.nodeLists={},this.error=!1,this.handlers=new a.SubHandlers(t),this.nodeFactory=new s.NodeFactory,(this.nodeFactory.configuration=this).nodeFactory.setCreators(t.nodes),this.itemFactory=new o.default(t.items),this.itemFactory.configuration=this,l.defaultOptions.apply(void 0,i([this.options],e)),l.defaultOptions(this.options,t.options)}e.default=u},function(t,e,r){"use strict";var n,i,o=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)});Object.defineProperty(e,"__esModule",{value:!0});var a,s=r(32),c=r(30),l=(a=s.BaseItem,o(u,a),u);function u(){return null!==a&&a.apply(this,arguments)||this}var h,f=(h=c.AbstractFactory,o(p,h),p.DefaultStackItems=((i={})[l.prototype.kind]=l,i),p);function p(){var t=null!==h&&h.apply(this,arguments)||this;return t.defaultKind="dummy",t.configuration=null,t}e.default=f},function(t,e,r){"use strict";var n=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},r=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 
0===e||0"),i=this.checkForErrors(this.adaptor.parse(n,"text/"+this.options.parseAs)),o=this.adaptor.body(i);1!==this.adaptor.childNodes(o).length&&this.error("MathML must consist of a single element"),r=this.adaptor.remove(this.adaptor.firstChild(o)),"math"!==this.adaptor.kind(r).replace(/^[a-z]+:/,"")&&this.error("MathML must be formed by a element, not <"+this.adaptor.kind(r)+">")}return r=this.executeFilters(this.mmlFilters,t,e,r),this.executeFilters(this.postFilters,t,e,this.mathml.compile(r))},p.prototype.checkForErrors=function(t){var e=this.adaptor.tags(this.adaptor.body(t),"parsererror")[0];return e&&(""===this.adaptor.textContent(e)&&this.error("Error processing MathML"),this.options.parseError.call(this,e)),t},p.prototype.error=function(t){throw new Error(t)},p.prototype.findMath=function(t){return this.findMathML.findMath(t)},p.NAME="MathML",p.OPTIONS=c.defaultOptions({parseAs:"html",forceReparse:!1,FindMathML:null,MathMLCompile:null,parseError:function(t){this.error(this.adaptor.textContent(t).replace(/\n.*/g,""))}},o.AbstractInputJax.OPTIONS),p);function p(t){void 0===t&&(t={});var e=this,r=a(c.separateOptions(t,u.FindMathML.OPTIONS,h.MathMLCompile.OPTIONS),3),n=r[0],i=r[1],o=r[2];return(e=s.call(this,n)||this).findMathML=e.options.FindMathML||new u.FindMathML(i),e.mathml=e.options.MathMLCompile||new h.MathMLCompile(o),e.mmlFilters=new l.FunctionList,e}e.MathML=f},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),d=this&&this.__values||function(t){var e="function"==typeof Symbol&&Symbol.iterator,r=e&&t[e],n=0;if(r)return r.call(t);if(t&&"number"==typeof t.length)return{next:function(){return t&&n>=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var o,a=r(40),m="http://www.w3.org/1998/Math/MathML",s=(o=a.AbstractFindMath,i(c,o),c.prototype.findMath=function(t){var e=new Set;this.findMathNodes(t,e),this.findMathPrefixed(t,e);var r=this.adaptor.root(this.adaptor.document);return"html"===this.adaptor.kind(r)&&0===e.size&&this.findMathNS(t,e),this.processMath(e)},c.prototype.findMathNodes=function(t,e){var r,n;try{for(var i=d(this.adaptor.tags(t,"math")),o=i.next();!o.done;o=i.next()){var a=o.value;e.add(a)}}catch(t){r={error:t}}finally{try{o&&!o.done&&(n=i.return)&&n.call(i)}finally{if(r)throw r.error}}},c.prototype.findMathPrefixed=function(t,e){var r,n,i,o,a=this.adaptor.root(this.adaptor.document);try{for(var s=d(this.adaptor.allAttributes(a)),c=s.next();!c.done;c=s.next()){var l=c.value;if("xmlns:"===l.name.substr(0,6)&&l.value===m){var u=l.name.substr(6);try{for(var h=(i=void 0,d(this.adaptor.tags(t,u+":math"))),f=h.next();!f.done;f=h.next()){var p=f.value;e.add(p)}}catch(t){i={error:t}}finally{try{f&&!f.done&&(o=h.return)&&o.call(h)}finally{if(i)throw i.error}}}}}catch(t){r={error:t}}finally{try{c&&!c.done&&(n=s.return)&&n.call(s)}finally{if(r)throw r.error}}},c.prototype.findMathNS=function(t,e){var r,n;try{for(var i=d(this.adaptor.tags(t,"math",m)),o=i.next();!o.done;o=i.next()){var a=o.value;e.add(a)}}catch(t){r={error:t}}finally{try{o&&!o.done&&(n=i.return)&&n.call(i)}finally{if(r)throw 
r.error}}},c.prototype.processMath=function(t){var e,r,n=[];try{for(var i=d(Array.from(t)),o=i.next();!o.done;o=i.next()){var a=o.value,s="block"===this.adaptor.getAttribute(a,"display")||"display"===this.adaptor.getAttribute(a,"mode"),c={node:a,n:0,delim:""},l={node:a,n:0,delim:""};n.push({math:this.adaptor.outerHTML(a),start:c,end:l,display:s})}}catch(t){e={error:t}}finally{try{o&&!o.done&&(r=i.return)&&r.call(i)}finally{if(e)throw e.error}}return n},c.OPTIONS={},c);function c(){return null!==o&&o.apply(this,arguments)||this}e.FindMathML=s},function(t,e,r){"use strict";var n=this&&this.__assign||function(){return(n=Object.assign||function(t){for(var e,r=1,n=arguments.length;r=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var i=r(0),o=r(3),a=r(12),s=(c.prototype.setMmlFactory=function(t){this.factory=t},c.prototype.compile=function(t){var e=this.makeNode(t);return e.verifyTree(this.options.verify),e.setInheritedAttributes({},!1,0,!1),e.walkTree(this.markMrows),e},c.prototype.makeNode=function(t){var e,r,n=!1,i="",o=this.adaptor.kind(t).replace(/^.*:/,"");try{for(var a=u(this.adaptor.allClasses(t)),s=a.next();!s.done;s=a.next()){var c=s.value;c.match(/^MJX-TeXAtom-/)?(i=c.substr(12),o="TeXAtom"):"MJX-fixedlimits"===c&&(n=!0)}}catch(t){e={error:t}}finally{try{s&&!s.done&&(r=a.return)&&r.call(a)}finally{if(e)throw e.error}}this.factory.getNodeClass(o)||this.error('Unknown node type "'+o+'"');var l=this.factory.create(o);return i&&this.texAtom(l,i,n),this.addAttributes(l,t),this.checkClass(l,t),this.addChildren(l,t),l},c.prototype.addAttributes=function(t,e){var r,n;try{for(var i=u(this.adaptor.allAttributes(e)),o=i.next();!o.done;o=i.next()){var a=o.value,s=a.name;if("class"!==s){var c=this.filterAttribute(s,a.value);if(null!==c){var l=c.toLowerCase();"true"===l||"false"===l?t.attributes.set(s,"true"===l):t.attributes.set(s,c)}}}}catch(t){r={error:t}}finally{try{o&&!o.done&&(n=i.return)&&n.call(i)}finally{if(r)throw r.error}}},c.prototype.filterAttribute=function(t,e){return e},c.prototype.addChildren=function(t,e){var r,n;if(0!==t.arity)try{for(var i=u(this.adaptor.childNodes(e)),o=i.next();!o.done;o=i.next()){var a=o.value,s=this.adaptor.kind(a);if("#comment"!==s)if("#text"===s)this.addText(t,a);else if(t.isKind("annotation-xml"))t.appendChild(this.factory.create("XML").setXML(a));else{var c=t.appendChild(this.makeNode(a));0===c.arity&&this.adaptor.childNodes(a).length&&(this.options.fixMisplacedChildren?this.addChildren(t,a):c.mError("There should not be children for "+c.kind+" nodes",this.options.verify,!0))}}}catch(t){r={error:t}}finally{try{o&&!o.done&&(n=i.return)&&n.call(i)}finally{if(r)throw r.error}}},c.prototype.addText=function(t,e){var r=this.adaptor.value(e);(t.isToken||t.getProperty("isChars"))&&t.arity?(t.isToken&&(r=a.translate(r),r=this.trimSpace(r)),t.appendChild(this.factory.create("text").setText(r))):r.match(/\S/)&&this.error('Unexpected text node "'+r+'"')},c.prototype.checkClass=function(t,e){var r,n,i=[];try{for(var o=u(this.adaptor.allClasses(e)),a=o.next();!a.done;a=o.next()){var s=a.value;"MJX-"===s.substr(0,4)?"MJX-variant"===s?t.setProperty("variantForm",!0):"MJX-TeXAtom"!==s.substr(0,11)&&t.attributes.set("mathvariant",s.substr(3)):i.push(s)}}catch(t){r={error:t}}finally{try{a&&!a.done&&(n=o.return)&&n.call(o)}finally{if(r)throw r.error}}i.length&&t.attributes.set("class",i.join(" 
"))},c.prototype.texAtom=function(t,e,r){t.texClass=i.TEXCLASS[e],"OP"!==e||r||(t.setProperty("movesupsub",!0),t.attributes.setInherited("movablelimits",!0))},c.prototype.markMrows=function(t){if(t.isKind("mrow")&&!t.isInferred&&2<=t.childNodes.length){var e=t.childNodes[0],r=t.childNodes[t.childNodes.length-1];e.isKind("mo")&&e.attributes.get("fence")&&r.isKind("mo")&&r.attributes.get("fence")&&(e.childNodes.length&&t.setProperty("open",e.getText()),r.childNodes.length&&t.setProperty("close",r.getText()))}},c.prototype.trimSpace=function(t){return t.replace(/[\t\n\r]/g," ").trim().replace(/ +/g," ")},c.prototype.error=function(t){throw new Error(t)},c.OPTIONS={MmlFactory:null,fixMisplacedChildren:!0,verify:{},translateEntities:!0},c.VERIFY=n({},i.AbstractMmlNode.verifyDefaults),c);function c(t){void 0===t&&(t={});var e=this.constructor;this.options=o.userOptions(o.defaultOptions({},e.OPTIONS),t),this.options.verify&&(this.options.verify=o.userOptions(o.defaultOptions({},e.VERIFY),this.options.verify))}e.MathMLCompile=s},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),o=this&&this.__assign||function(){return(o=Object.assign||function(t){for(var e,r=1,n=arguments.length;r=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var s,c=r(123),l=r(125),u=r(77),h=(s=c.CommonOutputJax,i(f,s),f.prototype.escaped=function(t,e){return this.setDocument(e),this.html("span",{},[this.text(t.math)])},f.prototype.styleSheet=function(t){var e=s.prototype.styleSheet.call(this,t);return this.adaptor.setAttribute(e,"id",f.STYLESHEETID),e},f.prototype.addClassStyles=function(t){var e;this.options.adaptiveCSS&&!t.used||(t.autoStyle&&"unknown"!==t.kind&&this.cssStyles.addStyles(((e={})["mjx-"+t.kind]={display:"inline-block","text-align":"left"},e)),s.prototype.addClassStyles.call(this,t))},f.prototype.processMath=function(t,e){this.factory.wrap(t).toCHTML(e)},f.prototype.clearCache=function(){var e,t;this.cssStyles.clear(),this.font.clearCache();try{for(var r=a(this.factory.getKinds()),n=r.next();!n.done;n=r.next()){var i=n.value;this.factory.getNodeClass(i).used=!1}}catch(t){e={error:t}}finally{try{n&&!n.done&&(t=r.return)&&t.call(r)}finally{if(e)throw e.error}}},f.prototype.unknownText=function(t,e){var r={},n=100/this.math.metrics.scale;return 100!=n&&(r["font-size"]=this.fixed(n,1)+"%"),"-explicitFont"!==e&&this.cssFontStyles(this.font.getCssFont(e),r),this.html("mjx-utext",{variant:e,style:r},[this.text(t)])},f.prototype.measureTextNode=function(t){var e=this.adaptor;t=e.clone(t);var r=this.html("mjx-measure-text",{},[t]);e.append(e.parent(this.math.start.node),this.container),e.append(this.container,r);var n=e.nodeSize(t,this.math.metrics.em)[0]/this.math.metrics.scale;return e.remove(this.container),e.remove(r),{w:n,h:.75,d:.25}},f.prototype.getFontData=function(t){var e=s.prototype.getFontData.call(this,t);return e[0]="MJXZERO, "+e[0],e},f.NAME="CHTML",f.OPTIONS=o(o({},c.CommonOutputJax.OPTIONS),{adaptiveCSS:!0}),f.commonStyles={'mjx-container [space="1"]':{"margin-left":".111em"},'mjx-container [space="2"]':{"margin-left":".167em"},'mjx-container 
[space="3"]':{"margin-left":".222em"},'mjx-container [space="4"]':{"margin-left":".278em"},'mjx-container [space="5"]':{"margin-left":".333em"},'mjx-container [rspace="1"]':{"margin-right":".111em"},'mjx-container [rspace="2"]':{"margin-right":".167em"},'mjx-container [rspace="3"]':{"margin-right":".222em"},'mjx-container [rspace="4"]':{"margin-right":".278em"},'mjx-container [rspace="5"]':{"margin-right":".333em"},'mjx-container [size="s"]':{"font-size":"70.7%"},'mjx-container [size="ss"]':{"font-size":"50%"},'mjx-container [size="Tn"]':{"font-size":"60%"},'mjx-container [size="sm"]':{"font-size":"85%"},'mjx-container [size="lg"]':{"font-size":"120%"},'mjx-container [size="Lg"]':{"font-size":"144%"},'mjx-container [size="LG"]':{"font-size":"173%"},'mjx-container [size="hg"]':{"font-size":"207%"},'mjx-container [size="HG"]':{"font-size":"249%"},'mjx-container [width="full"]':{width:"100%"},"mjx-box":{display:"inline-block"},"mjx-block":{display:"block"},"mjx-itable":{display:"inline-table"},"mjx-row":{display:"table-row"},"mjx-row > *":{display:"table-cell"},"mjx-mtext":{display:"inline-block"},"mjx-mstyle":{display:"inline-block"},"mjx-merror":{display:"inline-block",color:"red","background-color":"yellow"},"mjx-mphantom":{visibility:"hidden"}},f.STYLESHEETID="MJX-CHTML-styles",f);function f(t){void 0===t&&(t=null);var e=s.call(this,t,l.CHTMLWrapperFactory,u.TeXFont)||this;return e.font.adaptiveCSS(e.options.adaptiveCSS),e}e.CHTML=h},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),o=this&&this.__assign||function(){return(o=Object.assign||function(t){for(var e,r=1,n=arguments.length;r=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var c,a=r(42),l=r(3),u=r(124),h=r(14),f=r(71),p=(c=a.AbstractOutputJax,i(d,c),d.prototype.typeset=function(t,e){this.setDocument(e);var r=this.createNode();return this.toDOM(t,r,e),r},d.prototype.createNode=function(){var t=this.constructor.NAME;return this.html("mjx-container",{class:"MathJax",jax:t})},d.prototype.setScale=function(t){var e=this.math.metrics.scale*this.options.scale;1!=e&&this.adaptor.setStyle(t,"fontSize",h.percent(e))},d.prototype.toDOM=function(t,e,r){void 0===r&&(r=null),this.setDocument(r),this.math=t,this.pxPerEm=t.metrics.ex/this.font.params.x_height,t.root.setTeXclass(null),this.setScale(e),this.nodeMap=new Map,this.container=e,this.processMath(t.root,e),this.nodeMap=null,this.executeFilters(this.postFilters,t,r,e)},d.prototype.getBBox=function(t,e){this.setDocument(e),(this.math=t).root.setTeXclass(null),this.nodeMap=new Map;var r=this.factory.wrap(t.root).getBBox();return this.nodeMap=null,r},d.prototype.getMetrics=function(t){var e,r;this.setDocument(t);var n=this.adaptor,i=this.getMetricMaps(t);try{for(var o=w(t.math),a=o.next();!a.done;a=o.next()){var s=a.value,c=i[s.display?1:0].get(n.parent(s.start.node)),l=c.em,u=c.ex,h=c.containerWidth,f=c.lineWidth,p=c.scale;s.setMetrics(l,u,h,f,p)}}catch(t){e={error:t}}finally{try{a&&!a.done&&(r=o.return)&&r.call(o)}finally{if(e)throw e.error}}},d.prototype.getMetricsFor=function(t,e){var r=this.getTestElement(t,e),n=this.measureMetrics(r);return 
this.adaptor.remove(r),n},d.prototype.getMetricMaps=function(t){var e,r,n,i,o,a,s,c,l,u,h=this.adaptor,f=[new Map,new Map];try{for(var p=w(t.math),d=p.next();!d.done;d=p.next()){var m=d.value,y=h.parent(m.start.node),v=f[m.display?1:0];v.has(y)||v.set(y,this.getTestElement(y,m.display))}}catch(t){e={error:t}}finally{try{d&&!d.done&&(r=p.return)&&r.call(p)}finally{if(e)throw e.error}}var b=[new Map,new Map];try{for(var g=w(b.keys()),M=g.next();!M.done;M=g.next()){var O=M.value;try{for(var x=(o=void 0,w(f[O].keys())),S=x.next();!S.done;S=x.next())y=S.value,b[O].set(y,this.measureMetrics(f[O].get(y)))}catch(t){o={error:t}}finally{try{S&&!S.done&&(a=x.return)&&a.call(x)}finally{if(o)throw o.error}}}}catch(t){n={error:t}}finally{try{M&&!M.done&&(i=g.return)&&i.call(g)}finally{if(n)throw n.error}}try{for(var E=w(b.keys()),C=E.next();!C.done;C=E.next()){O=C.value;try{for(var _=(l=void 0,w(f[O].values())),T=_.next();!T.done;T=_.next())y=T.value,h.remove(y)}catch(t){l={error:t}}finally{try{T&&!T.done&&(u=_.return)&&u.call(_)}finally{if(l)throw l.error}}}}catch(t){s={error:t}}finally{try{C&&!C.done&&(c=E.return)&&c.call(E)}finally{if(s)throw s.error}}return b},d.prototype.getTestElement=function(t,e){var r=this.adaptor;if(!this.testInline){this.testInline=this.html("mjx-test",{style:{display:"inline-block",width:"100%","font-style":"normal","font-weight":"normal","font-size":"100%","font-size-adjust":"none","text-indent":0,"text-transform":"none","letter-spacing":"normal","word-spacing":"normal",overflow:"hidden",height:"1px","margin-right":"-1px"}},[this.html("mjx-left-box",{style:{display:"inline-block",width:0,float:"left"}}),this.html("mjx-ex-box",{style:{position:"absolute",overflow:"hidden",width:"1px",height:"60ex"}}),this.html("mjx-right-box",{style:{display:"inline-block",width:0,float:"right"}})]),this.testDisplay=r.clone(this.testInline),r.setStyle(this.testDisplay,"display","table"),r.setStyle(this.testDisplay,"margin-right",""),r.setStyle(r.firstChild(this.testDisplay),"display","none");var n=r.lastChild(this.testDisplay);r.setStyle(n,"display","table-cell"),r.setStyle(n,"width","10000em"),r.setStyle(n,"float","")}return r.append(t,r.clone(e?this.testDisplay:this.testInline))},d.prototype.measureMetrics=function(t){var e=this.adaptor,r=e.fontSize(t),n=e.nodeSize(e.childNode(t,1))[1]/60||r*this.options.exFactor;return{em:r,ex:n,containerWidth:"table"===e.getStyle(t,"display")?e.nodeSize(e.lastChild(t))[0]-1:e.nodeBBox(e.lastChild(t)).left-e.nodeBBox(e.firstChild(t)).left-2,lineWidth:1e6,scale:Math.max(this.options.minScale,this.options.matchFontHeight?n/this.font.params.x_height/r:1)}},d.prototype.styleSheet=function(t){var e,r;this.setDocument(t),this.cssStyles.clear(),this.cssStyles.addStyles(this.constructor.commonStyles);try{for(var n=w(this.factory.getKinds()),i=n.next();!i.done;i=n.next()){var o=i.value;this.addClassStyles(this.factory.getNodeClass(o))}}catch(t){e={error:t}}finally{try{i&&!i.done&&(r=n.return)&&r.call(n)}finally{if(e)throw e.error}}return this.cssStyles.addStyles(this.font.styles),this.html("style",{id:"MJX-styles"},[this.text("\n"+this.cssStyles.cssText+"\n")])},d.prototype.addClassStyles=function(t){this.cssStyles.addStyles(t.styles)},d.prototype.setDocument=function(t){t&&(this.document=t,this.adaptor.document=t.document)},d.prototype.html=function(t,e,r,n){return void 0===e&&(e={}),void 0===r&&(r=[]),this.adaptor.node(t,e,r,n)},d.prototype.text=function(t){return this.adaptor.text(t)},d.prototype.fixed=function(t,e){return void 
0===e&&(e=3),Math.abs(t)<6e-4?"0":t.toFixed(e).replace(/\.?0+$/,"")},d.prototype.measureText=function(t,e,r){void 0===r&&(r=["",!1,!1]);var n=this.unknownText(t,e);if("-explicitFont"===e){var i=this.cssFontStyles(r);this.adaptor.setAttributes(n,{style:i})}return this.measureTextNodeWithCache(n,t,e,r)},d.prototype.measureTextNodeWithCache=function(t,e,r,n){void 0===n&&(n=["",!1,!1]),"-explicitFont"===r&&(r=[n[0],n[1]?"T":"F",n[2]?"T":"F",""].join("-")),this.unknownCache.has(r)||this.unknownCache.set(r,new Map);var i=this.unknownCache.get(r),o=i.get(e);if(o)return o;var a=this.measureTextNode(t);return i.set(e,a),a},d.prototype.cssFontStyles=function(t,e){void 0===e&&(e={});var r=s(t,3),n=r[0],i=r[1],o=r[2];return e["font-family"]=n,i&&(e["font-style"]="italic"),o&&(e["font-weight"]="bold"),e},d.prototype.getFontData=function(t){return[(t=t||new f.Styles).get("font-family"),"italic"===t.get("font-style"),"bold"===t.get("font-weight")]},d.NAME="Common",d.OPTIONS=o(o({},a.AbstractOutputJax.OPTIONS),{scale:1,minScale:.5,matchFontHeight:!0,mtextInheritFont:!1,merrorInheritFont:!0,mathmlSpacing:!1,skipAttributes:{},exFactor:.5,displayAlign:"center",displayIndent:"0",wrapperFactory:null,font:null,cssStyles:null}),d.commonStyles={},d);function d(t,e,r){void 0===t&&(t=null),void 0===e&&(e=null),void 0===r&&(r=null);var n=this,i=s(l.separateOptions(t,r.OPTIONS),2),o=i[0],a=i[1];return(n=c.call(this,o)||this).factory=n.options.wrapperFactory||new e,(n.factory.jax=n).cssStyles=n.options.cssStyles||new u.CssStyles,n.font=n.options.font||new r(a),n.unknownCache=new Map,n}e.CommonOutputJax=p},function(t,e,r){"use strict";var l=this&&this.__values||function(t){var e="function"==typeof Symbol&&Symbol.iterator,r=e&&t[e],n=0;if(r)return r.call(t);if(t&&"number"==typeof t.length)return{next:function(){return t&&n>=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var n=(Object.defineProperty(i.prototype,"cssText",{get:function(){return this.getStyleString()},enumerable:!0,configurable:!0}),i.prototype.addStyles=function(t){var e,r;if(t)try{for(var n=l(Object.keys(t)),i=n.next();!i.done;i=n.next()){var o=i.value;this.styles[o]||(this.styles[o]={}),Object.assign(this.styles[o],t[o])}}catch(t){e={error:t}}finally{try{i&&!i.done&&(r=n.return)&&r.call(n)}finally{if(e)throw e.error}}},i.prototype.removeStyles=function(){for(var e,t,r=[],n=0;n=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},o=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var f,o=r(2),d=r(135),a=r(26),p=r(129),s=(f=d.CommonMoMixin(o.CHTMLWrapper),i(c,f),c.prototype.toCHTML=function(t){var e,r,n=this.node.attributes,i=n.get("symmetric")&&2!==this.stretch.dir,o=0!==this.stretch.dir;o&&null===this.size&&this.getStretchedVariant([]);var a=this.standardCHTMLnode(t);if(this.noIC&&this.adaptor.setAttribute(a,"noIC","true"),o&&this.size<0)this.stretchHTML(a,i);else{if(i||n.get("largeop")){var s=p.BBox.empty();f.prototype.computeBBox.call(this,s);var c=this.em((s.d-s.h)/2+this.font.params.axis_height);"0"!==c&&this.adaptor.setStyle(a,"verticalAlign",c)}try{for(var 
l=h(this.childNodes),u=l.next();!u.done;u=l.next())u.value.toCHTML(a)}catch(t){e={error:t}}finally{try{u&&!u.done&&(r=l.return)&&r.call(l)}finally{if(e)throw e.error}}}},c.prototype.stretchHTML=function(t,e){var r=this.getText().charCodeAt(0),n=this.stretch;n.used=!0;var i=n.stretch,o=[];i[0]&&o.push(this.html("mjx-beg",{},[this.html("mjx-c")])),o.push(this.html("mjx-ext",{},[this.html("mjx-c")])),4===i.length&&o.push(this.html("mjx-mid",{},[this.html("mjx-c")]),this.html("mjx-ext",{},[this.html("mjx-c")])),i[2]&&o.push(this.html("mjx-end",{},[this.html("mjx-c")]));var a={},s=this.bbox,c=s.h,l=s.d,u=s.w;1===n.dir?(o.push(this.html("mjx-mark")),a.height=this.em(c+l),a.verticalAlign=this.em(-l)):a.width=this.em(u);var h=d.DirectionVH[n.dir],f={class:this.char(n.c||r),style:a},p=this.html("mjx-stretchy-"+h,f,o);this.adaptor.append(t,p)},c.kind=a.MmlMo.prototype.kind,c.styles={"mjx-stretchy-h":{display:"inline-table",width:"100%"},"mjx-stretchy-h > *":{display:"table-cell",width:0},"mjx-stretchy-h > * > mjx-c":{display:"inline-block"},"mjx-stretchy-h > * > mjx-c::before":{padding:".001em 0",width:"initial"},"mjx-stretchy-h > mjx-ext":{overflow:"hidden",width:"100%"},"mjx-stretchy-h > mjx-ext > mjx-c::before":{transform:"scalex(500)"},"mjx-stretchy-h > mjx-ext > mjx-c":{width:0},"mjx-stretchy-h > mjx-beg > mjx-c":{"margin-right":"-.1em"},"mjx-stretchy-h > mjx-end > mjx-c":{"margin-left":"-.1em"},"mjx-stretchy-v":{display:"inline-block"},"mjx-stretchy-v > *":{display:"block"},"mjx-stretchy-v > mjx-beg":{height:0},"mjx-stretchy-v > mjx-end > mjx-c":{display:"block"},"mjx-stretchy-v > * > mjx-c":{transform:"scale(1)","transform-origin":"left center",overflow:"hidden"},"mjx-stretchy-v > mjx-ext":{display:"block",height:"100%","box-sizing":"border-box",border:"0px solid transparent",overflow:"hidden"},"mjx-stretchy-v > mjx-ext > mjx-c::before":{width:"initial"},"mjx-stretchy-v > mjx-ext > mjx-c":{transform:"scaleY(500) translateY(.1em)",overflow:"visible"},"mjx-mark":{display:"inline-block",height:"0px"}},c);function c(){return null!==f&&f.apply(this,arguments)||this}e.CHTMLmo=s},function(t,e,r){"use strict";var n,i,o=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),m=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var s=r(23);e.DirectionVH=((i={})[1]="v",i[2]="h",i),e.CommonMoMixin=function(t){return o(e,i=t),e.prototype.computeBBox=function(t,e){void 0===e&&(e=!1);var r=0!==this.stretch.dir;if(r&&null===this.size&&this.getStretchedVariant([0]),!(r&&this.size<0)&&(i.prototype.computeBBox.call(this,t),this.copySkewIC(t),this.noIC&&(t.w-=t.ic),this.node.attributes.get("symmetric")&&2!==this.stretch.dir)){var n=(t.h+t.d)/2+this.font.params.axis_height-t.h;t.h+=n,t.d-=n}},e.prototype.getVariant=function(){this.node.attributes.get("largeop")?this.variant=this.node.attributes.get("displaystyle")?"-largeop":"-smallop":i.prototype.getVariant.call(this)},e.prototype.canStretch=function(t){if(0!==this.stretch.dir)return 
this.stretch.dir===t;if(!this.node.attributes.get("stretchy"))return!1;var e=this.getText();if(1!==e.length)return!1;var r=this.font.getDelimiter(e.charCodeAt(0));return this.stretch=r&&r.dir===t?r:s.NOSTRETCH,0!==this.stretch.dir},e.prototype.getStretchedVariant=function(t,e){var r,n;if(void 0===e&&(e=!1),0!==this.stretch.dir){var i=this.getWH(t),o=this.getSize("minsize",0),a=this.getSize("maxsize",1/0);i=Math.max(o,Math.min(a,i));var s=o||e?i:Math.max(i*this.font.params.delimiterfactor/1e3,i-this.font.params.delimitershortfall),c=this.stretch,l=c.c||this.getText().charCodeAt(0),u=0;if(c.sizes)try{for(var h=p(c.sizes),f=h.next();!f.done;f=h.next()){if(s<=f.value)return this.variant=this.font.getSizeVariant(l,u),void(this.size=u);u++}}catch(t){r={error:t}}finally{try{f&&!f.done&&(n=h.return)&&n.call(h)}finally{if(r)throw r.error}}c.stretch?(this.size=-1,this.invalidateBBox(),this.getStretchBBox(t,i,c)):(this.variant=this.font.getSizeVariant(l,u-1),this.size=u-1)}},e.prototype.getSize=function(t,e){var r=this.node.attributes;return r.isSet(t)&&(e=this.length2em(r.get(t),1,1)),e},e.prototype.getWH=function(t){if(0===t.length)return 0;if(1===t.length)return t[0];var e=m(t,2),r=e[0],n=e[1],i=this.font.params.axis_height;return this.node.attributes.get("symmetric")?2*Math.max(r-i,n+i):r+n},e.prototype.getStretchBBox=function(t,e,r){var n;r.hasOwnProperty("min")&&r.min>e&&(e=r.min);var i=m(r.HDW,3),o=i[0],a=i[1],s=i[2];1===this.stretch.dir?(o=(n=m(this.getBaseline(t,e,r),2))[0],a=n[1]):s=e,this.bbox.h=o,this.bbox.d=a,this.bbox.w=s},e.prototype.getBaseline=function(t,e,r){var n=2===t.length&&t[0]+t[1]===e,i=this.node.attributes.get("symmetric"),o=m(n?t:[e,0],2),a=o[0],s=o[1],c=m([a+s,0],2),l=c[0],u=c[1];if(i){var h=this.font.params.axis_height;n&&(l=2*Math.max(a-h,s+h)),u=l/2-h}else if(n)u=s;else{var f=m(r.HDW||[.75,.25],2),p=f[0],d=f[1];u=d*(l/(p+d))}return[l-u,u]},e.prototype.remapChars=function(t){if(1==t.length){var e=this.node.parent,r=this.isAccent&&(e===this.node.coreParent()||e.isEmbellished)?"accent":"mo",n=this.font.getRemappedChar(r,t[0]);n&&(t=this.unicodeChars(n))}return t},e;function e(){for(var t=[],e=0;e=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var o,a=r(2),s=r(145),c=r(56),l=(o=s.CommonMpaddedMixin(a.CHTMLWrapper),i(u,o),u.prototype.toCHTML=function(t){var e,r,n=this.standardCHTMLnode(t),i=[],o={},a=v(this.getDimens(),9),s=(a[0],a[1],a[2]),c=a[3],l=a[4],u=a[5],h=a[6],f=a[7],p=a[8];if(u&&(o.width=this.em(s+u)),(c||l)&&(o.margin=this.em(c)+" 0 "+this.em(l)),h+p||f){o.position="relative";var d=this.html("mjx-rbox",{style:{left:this.em(h+p),top:this.em(-f)}});h+p&&this.childNodes[0].getBBox().pwidth&&(this.adaptor.setAttribute(d,"width","full"),this.adaptor.setStyle(d,"left",this.em(h))),i.push(d)}n=this.adaptor.append(n,this.html("mjx-block",{style:o},i));try{for(var m=b(this.childNodes),y=m.next();!y.done;y=m.next())y.value.toCHTML(i[0]||n)}catch(t){e={error:t}}finally{try{y&&!y.done&&(r=m.return)&&r.call(m)}finally{if(e)throw e.error}}},u.kind=c.MmlMpadded.prototype.kind,u.styles={"mjx-mpadded":{display:"inline-block"},"mjx-rbox":{display:"inline-block",position:"relative"}},u);function u(){return null!==o&&o.apply(this,arguments)||this}e.CHTMLmpadded=l},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var 
r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),l=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},m=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0 mjx-dstrike":{display:"inline-block",left:0,top:0,position:"absolute","border-top":b.SOLID,"transform-origin":"top left"},"mjx-menclose > mjx-ustrike":{display:"inline-block",left:0,bottom:0,position:"absolute","border-top":b.SOLID,"transform-origin":"bottom left"},"mjx-menclose > mjx-hstrike":{"border-top":b.SOLID,position:"absolute",left:0,right:0,bottom:"50%",transform:"translateY("+c.em(b.THICKNESS/2)+")"},"mjx-menclose > mjx-vstrike":{"border-left":b.SOLID,position:"absolute",top:0,bottom:0,right:"50%",transform:"translateX("+c.em(b.THICKNESS/2)+")"},"mjx-menclose > mjx-rbox":{position:"absolute",top:0,bottom:0,right:0,left:0,border:b.SOLID,"border-radius":c.em(b.THICKNESS+b.PADDING)},"mjx-menclose > mjx-cbox":{position:"absolute",top:0,bottom:0,right:0,left:0,border:b.SOLID,"border-radius":"50%"},"mjx-menclose > mjx-arrow":{position:"absolute",left:0,bottom:"50%",height:0,width:0},"mjx-menclose > mjx-arrow > *":{display:"block",position:"absolute","transform-origin":"bottom","border-left":c.em(b.THICKNESS*b.ARROWX)+" solid","border-right":0,"box-sizing":"border-box"},"mjx-menclose > mjx-arrow > mjx-aline":{left:0,top:c.em(-b.THICKNESS/2),right:c.em(b.THICKNESS*(b.ARROWX-1)),height:0,"border-top":c.em(b.THICKNESS)+" solid","border-left":0},"mjx-menclose > mjx-arrow[double] > mjx-aline":{left:c.em(b.THICKNESS*(b.ARROWX-1)),height:0},"mjx-menclose > mjx-arrow > mjx-rthead":{transform:"skewX("+u+"rad)",right:0,bottom:"-1px","border-bottom":"1px solid transparent","border-top":c.em(b.THICKNESS*b.ARROWY)+" solid transparent"},"mjx-menclose > mjx-arrow > mjx-rbhead":{transform:"skewX(-"+u+"rad)","transform-origin":"top",right:0,top:"-1px","border-top":"1px solid transparent","border-bottom":c.em(b.THICKNESS*b.ARROWY)+" solid transparent"},"mjx-menclose > mjx-arrow > mjx-lthead":{transform:"skewX(-"+u+"rad)",left:0,bottom:"-1px","border-left":0,"border-right":c.em(b.THICKNESS*b.ARROWX)+" solid","border-bottom":"1px solid transparent","border-top":c.em(b.THICKNESS*b.ARROWY)+" solid transparent"},"mjx-menclose > mjx-arrow > mjx-lbhead":{transform:"skewX("+u+"rad)","transform-origin":"top",left:0,top:"-1px","border-left":0,"border-right":c.em(b.THICKNESS*b.ARROWX)+" solid","border-top":"1px solid transparent","border-bottom":c.em(b.THICKNESS*b.ARROWY)+" solid transparent"},"mjx-menclose > dbox":{position:"absolute",top:0,bottom:0,left:c.em(-1.5*b.PADDING),width:c.em(3*b.PADDING),border:c.em(b.THICKNESS)+" solid","border-radius":"50%","clip-path":"inset(0 0 0 "+c.em(1.5*b.PADDING)+")","box-sizing":"border-box"}},f.notations=new 
Map([b.Border("top"),b.Border("right"),b.Border("bottom"),b.Border("left"),b.Border2("actuarial","top","right"),b.Border2("madruwb","bottom","right"),b.DiagonalStrike("up",1),b.DiagonalStrike("down",-1),["horizontalstrike",{renderer:b.RenderElement("hstrike","Y"),bbox:function(t){return[0,t.padding,0,t.padding]}}],["verticalstrike",{renderer:b.RenderElement("vstrike","X"),bbox:function(t){return[t.padding,0,t.padding,0]}}],["box",{renderer:function(t,e){t.adaptor.setStyle(e,"border",t.em(t.thickness)+" solid")},bbox:b.fullBBox,border:b.fullBorder,remove:"left right top bottom"}],["roundedbox",{renderer:b.RenderElement("rbox"),bbox:b.fullBBox}],["circle",{renderer:b.RenderElement("cbox"),bbox:b.fullBBox}],["phasorangle",{renderer:function(t,e){var r=t.getBBox(),n=(r.w,r.h),i=r.d,o=m(t.getArgMod(1.75*t.padding,n+i),2),a=o[0],s=o[1],c=t.thickness*Math.sin(a)*.9;t.adaptor.setStyle(e,"border-bottom",t.em(t.thickness)+" solid");var l=t.adjustBorder(t.html("mjx-ustrike",{style:{width:t.em(s),transform:"translateX("+t.em(c)+") rotate("+t.fixed(-a)+"rad)"}}));t.adaptor.append(t.chtml,l)},bbox:function(t){var e=t.padding/2,r=t.thickness;return[2*e,e,e+r,3*e+r]},border:function(t){return[0,0,t.thickness,0]},remove:"bottom"}],b.Arrow("up"),b.Arrow("down"),b.Arrow("left"),b.Arrow("right"),b.Arrow("updown"),b.Arrow("leftright"),b.DiagonalArrow("updiagonal"),b.DiagonalArrow("northeast"),b.DiagonalArrow("southeast"),b.DiagonalArrow("northwest"),b.DiagonalArrow("southwest"),b.DiagonalArrow("northeastsouthwest"),b.DiagonalArrow("northwestsoutheast"),["longdiv",{renderer:function(t,e){var r=t.adaptor;r.setStyle(e,"border-top",t.em(t.thickness)+" solid");var n=r.append(t.chtml,t.html("dbox")),i=t.thickness,o=t.padding;i!==b.THICKNESS&&r.setStyle(n,"border-width",t.em(i)),o!==b.PADDING&&(r.setStyle(n,"left",t.em(-1.5*o)),r.setStyle(n,"width",t.em(3*o)),r.setStyle(n,"clip-path","inset(0 0 0 "+t.em(1.5*o)+")"))},bbox:function(t){var e=t.padding,r=t.thickness;return[e+r,e,e,2*e+r/2]}}],["radical",{renderer:function(e,t){e.msqrt.toCHTML(t);var r=e.sqrtTRBL();e.adaptor.setStyle(e.msqrt.chtml,"margin",r.map(function(t){return e.em(-t)}).join(" "))},init:function(t){t.msqrt=t.createMsqrt(t.childNodes[0])},bbox:function(t){return t.sqrtTRBL()},renderChild:!0}]]),f);function f(){return null!==l&&l.apply(this,arguments)||this}e.CHTMLmenclose=h},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),f=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var c=r(36),l=r(10);e.CommonMencloseMixin=function(t){return i(e,n=t),e.prototype.getParameters=function(){var t=this.node.attributes,e=t.get("data-padding");void 0!==e&&(this.padding=this.length2em(e,c.PADDING));var r=t.get("data-thickness");void 0!==r&&(this.thickness=this.length2em(r,c.THICKNESS));var n=t.get("data-arrowhead");if(void 0!==n){var 
i=f(l.split(n),3),o=i[0],a=i[1],s=i[2];this.arrowhead={x:o?parseFloat(o):c.ARROWX,y:a?parseFloat(a):c.ARROWY,dx:s?parseFloat(s):c.ARROWDX}}},e.prototype.getNotations=function(){var e,t,r=this.constructor.notations;try{for(var n=h(l.split(this.node.attributes.get("notation"))),i=n.next();!i.done;i=n.next()){var o=i.value,a=r.get(o);a&&(this.notations[o]=a).renderChild&&(this.renderChild=a.renderer)}}catch(t){e={error:t}}finally{try{i&&!i.done&&(t=n.return)&&t.call(n)}finally{if(e)throw e.error}}},e.prototype.removeRedundantNotations=function(){var e,t,r,n;try{for(var i=h(Object.keys(this.notations)),o=i.next();!o.done;o=i.next()){var a=o.value;if(this.notations[a]){var s=this.notations[a].remove||"";try{for(var c=(r=void 0,h(s.split(/ /))),l=c.next();!l.done;l=c.next()){var u=l.value;delete this.notations[u]}}catch(t){r={error:t}}finally{try{l&&!l.done&&(n=c.return)&&n.call(c)}finally{if(r)throw r.error}}}}}catch(t){e={error:t}}finally{try{o&&!o.done&&(t=i.return)&&t.call(i)}finally{if(e)throw e.error}}},e.prototype.initializeNotations=function(){var e,t;try{for(var r=h(Object.keys(this.notations)),n=r.next();!n.done;n=r.next()){var i=n.value,o=this.notations[i].init;o&&o(this)}}catch(t){e={error:t}}finally{try{n&&!n.done&&(t=r.return)&&t.call(r)}finally{if(e)throw e.error}}},e.prototype.computeBBox=function(t,e){void 0===e&&(e=!1);var r=f(this.getBBoxExtenders(),4),n=r[0],i=r[1],o=r[2],a=r[3],s=this.childNodes[0].getBBox();t.combine(s,a,0),t.h+=n,t.d+=o,t.w+=i,this.setChildPWidths(e)},e.prototype.getBBoxExtenders=function(){var e,t,r=[0,0,0,0];try{for(var n=h(Object.keys(this.notations)),i=n.next();!i.done;i=n.next()){var o=i.value;this.maximizeEntries(r,this.notations[o].bbox(this))}}catch(t){e={error:t}}finally{try{i&&!i.done&&(t=n.return)&&t.call(n)}finally{if(e)throw e.error}}return r},e.prototype.getPadding=function(){var e,t,r=[0,0,0,0],n=[0,0,0,0];try{for(var i=h(Object.keys(this.notations)),o=i.next();!o.done;o=i.next()){var a=o.value;this.maximizeEntries(r,this.notations[a].bbox(this));var s=this.notations[a].border;s&&this.maximizeEntries(n,s(this))}}catch(t){e={error:t}}finally{try{o&&!o.done&&(t=i.return)&&t.call(i)}finally{if(e)throw e.error}}return[0,1,2,3].map(function(t){return r[t]-n[t]})},e.prototype.maximizeEntries=function(t,e){for(var r=0;r=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var o,a=r(2),s=r(73),c=r(73),u=r(52),h=(o=s.CommonMrowMixin(a.CHTMLWrapper),i(f,o),f.prototype.toCHTML=function(t){var e,r,n=this.node.isInferred?this.chtml=t:this.standardCHTMLnode(t),i=!1;try{for(var o=l(this.childNodes),a=o.next();!a.done;a=o.next()){var s=a.value;s.toCHTML(n),s.bbox.w<0&&(i=!0)}}catch(t){e={error:t}}finally{try{a&&!a.done&&(r=o.return)&&r.call(o)}finally{if(e)throw e.error}}if(i){var c=this.getBBox().w;c&&(this.adaptor.setStyle(n,"width",this.em(Math.max(0,c))),c<0&&this.adaptor.setStyle(n,"marginRight",this.em(c)))}},f.kind=u.MmlMrow.prototype.kind,f);function f(){return null!==o&&o.apply(this,arguments)||this}e.CHTMLmrow=h;var p,d=(p=c.CommonInferredMrowMixin(h),i(m,p),m.kind=u.MmlInferredMrow.prototype.kind,m);function m(){return null!==p&&p.apply(this,arguments)||this}e.CHTMLinferredMrow=d},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in 
e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)});Object.defineProperty(e,"__esModule",{value:!0});var o,a=r(2),s=r(151),c=r(57),l=(o=s.CommonMfencedMixin(a.CHTMLWrapper),i(u,o),u.prototype.toCHTML=function(t){var e=this.standardCHTMLnode(t);this.mrow.toCHTML(e)},u.kind=c.MmlMfenced.prototype.kind,u);function u(){return null!==o&&o.apply(this,arguments)||this}e.CHTMLmfenced=l},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),o=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0}),e.CommonMfencedMixin=function(t){return i(e,n=t),e.prototype.createMrow=function(){var t=this.node.factory.create("inferredMrow");t.inheritAttributesFrom(this.node),this.mrow=this.wrap(t),this.mrow.parent=this},e.prototype.addMrowChildren=function(){var e,t,r=this.node,n=this.mrow;this.addMo(r.open),this.childNodes.length&&n.childNodes.push(this.childNodes[0]);var i=0;try{for(var o=c(this.childNodes.slice(1)),a=o.next();!a.done;a=o.next()){var s=a.value;this.addMo(r.separators[i++]),n.childNodes.push(s)}}catch(t){e={error:t}}finally{try{a&&!a.done&&(t=o.return)&&t.call(o)}finally{if(e)throw e.error}}this.addMo(r.close),n.stretchChildren()},e.prototype.addMo=function(t){if(t){var e=this.wrap(t);this.mrow.childNodes.push(e),e.parent=this.mrow}},e.prototype.computeBBox=function(t,e){void 0===e&&(e=!1),t.updateFrom(this.mrow.getBBox()),this.setChildPWidths(e)},e;function e(){for(var t=[],e=0;e *":{"font-size":"2000%"},"mjx-dbox":{display:"block","font-size":"5%"},"mjx-num":{display:"block","text-align":"center"},"mjx-den":{display:"block","text-align":"center"},"mjx-mfrac[bevelled] > mjx-num":{display:"inline-block"},"mjx-mfrac[bevelled] > mjx-den":{display:"inline-block"},'mjx-den[align="right"], mjx-num[align="right"]':{"text-align":"right"},'mjx-den[align="left"], mjx-num[align="left"]':{"text-align":"left"},"mjx-nstrut":{display:"inline-block",height:".054em",width:0,"vertical-align":"-.054em"},'mjx-nstrut[type="d"]':{height:".217em","vertical-align":"-.217em"},"mjx-dstrut":{display:"inline-block",height:".505em",width:0},'mjx-dstrut[type="d"]':{height:".726em"},"mjx-line":{display:"block","box-sizing":"border-box","min-height":"1px",height:".06em","border-top":".06em solid",margin:".06em -.1em",overflow:"hidden"},'mjx-line[type="d"]':{margin:".18em -.1em"}},u);function u(){return null!==o&&o.apply(this,arguments)||this}e.CHTMLmfrac=l},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),l=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var 
n,i,o=r.call(t),a=[];try{for(;(void 0===e||0this.surdH?(t.h+t.d-(this.surdH-e))/2:e+r/4]},e.prototype.getRootDimens=function(t){return[0,0,0,0]},e;function e(){for(var t=[],e=0;e=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var a,s=r(2),c=r(158),l=(a=c.CommonScriptbaseMixin(s.CHTMLWrapper),i(h,a),h.prototype.toCHTML=function(t){this.chtml=this.standardCHTMLnode(t);var e=o(this.getOffset(this.baseChild.getBBox(),this.script.getBBox()),2),r=e[0],n=e[1],i={"vertical-align":this.em(n)};r&&(i["margin-left"]=this.em(r)),this.baseChild.toCHTML(this.chtml),this.script.toCHTML(this.adaptor.append(this.chtml,this.html("mjx-script",{style:i})))},h.prototype.setDeltaW=function(t,e){for(var r=0;r=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var a=1.5;e.CommonScriptbaseMixin=function(t){var e,i;return o(r,i=t),Object.defineProperty(r.prototype,"baseChild",{get:function(){return this.childNodes[this.node.base]},enumerable:!0,configurable:!0}),Object.defineProperty(r.prototype,"script",{get:function(){return this.childNodes[1]},enumerable:!0,configurable:!0}),r.prototype.computeBBox=function(t,e){void 0===e&&(e=!1);var r=this.baseChild.getBBox(),n=this.script.getBBox(),i=s(this.getOffset(r,n),2),o=i[0],a=i[1];t.append(r),t.combine(n,t.w+o,a),t.w+=this.font.params.scriptspace,t.clean(),this.setChildPWidths(e)},r.prototype.coreIC=function(){var t=this.baseCore.getBBox();return t.ic?1.2*t.ic+.05:0},r.prototype.isCharBase=function(){var t=this.baseChild;return(t.node.isKind("mstyle")||t.node.isKind("mrow"))&&1===t.childNodes.length&&(t=t.childNodes[0]),(t.node.isKind("mo")||t.node.isKind("mi")||t.node.isKind("mn"))&&1===t.bbox.rscale&&1===t.getText().length&&!t.node.attributes.get("largeop")},r.prototype.getOffset=function(t,e){return[0,0]},r.prototype.getV=function(t,e){var r=this.font.params,n=this.length2em(this.node.attributes.get("subscriptshift"),r.sub1);return Math.max(this.isCharBase()?0:t.d+r.sub_drop*e.rscale,n,e.h*e.rscale-.8*r.x_height)},r.prototype.getU=function(t,e){var r=this.font.params,n=this.node.attributes.getList("displaystyle","texprimestyle","superscriptshift"),i=n.displaystyle?r.sup1:n.texprimestyle?r.sup3:r.sup2,o=this.length2em(n.superscriptshift,i);return Math.max(this.isCharBase()?0:t.h-r.sup_drop*e.rscale,o,e.d*e.rscale+.25*r.x_height)},r.prototype.hasMovableLimits=function(){return!this.node.attributes.get("displaystyle")&&(this.node.getProperty("movablelimits")||this.node.attributes.get("movablelimits")||this.baseChild.coreMO().node.attributes.get("movablelimits"))},r.prototype.getOverKU=function(t,e){var r=this.node.attributes.get("accent"),n=this.font.params,i=e.d*e.rscale,o=(r?n.rule_thickness:Math.max(n.big_op_spacing1,n.big_op_spacing3-Math.max(0,i)))-(this.baseChild.node.isKind("munderover")?.1:0);return[o,t.h*t.rscale+o+i]},r.prototype.getUnderKV=function(t,e){var r=this.node.attributes.get("accentunder"),n=this.font.params,i=e.h*e.rscale,o=(r?n.rule_thickness:Math.max(n.big_op_spacing2,n.big_op_spacing4-i))-(this.baseChild.node.isKind("munderover")?.1:0);return[o,-(t.d*t.rscale+o+i)]},r.prototype.getDeltaW=function(t,e){var r,n,i,o;void 0===e&&(e=[0,0,0]);var a=this.node.attributes.get("align"),s=t.map(function(t){return t.w*t.rscale}),c=Math.max.apply(Math,y(s)),l=[],u=0;try{for(var 
h=x(s.keys()),f=h.next();!f.done;f=h.next())l[m=f.value]=("center"===a?(c-s[m])/2:"right"===a?c-s[m]:0)+e[m],l[m] mjx-row":{"text-align":"left"},"mjx-under":{"padding-bottom":".1em"}},f);function f(){return null!==c&&c.apply(this,arguments)||this}e.CHTMLmunder=h;var d,m=(d=s.CommonMoverMixin(o.CHTMLmsup),i(y,d),y.prototype.toCHTML=function(t){if(this.hasMovableLimits())return d.prototype.toCHTML.call(this,t),void this.adaptor.setAttribute(this.chtml,"limits","false");this.chtml=this.standardCHTMLnode(t);var e=this.adaptor.append(this.chtml,this.html("mjx-over")),r=this.adaptor.append(this.chtml,this.html("mjx-base"));this.script.toCHTML(e),this.baseChild.toCHTML(r);var n=this.script.getBBox(),i=this.baseChild.getBBox(),o=p(this.getOverKU(i,n),2),a=o[0],s=(o[1],this.getDelta());this.adaptor.setStyle(e,"paddingBottom",this.em(a)),this.setDeltaW([r,e],this.getDeltaW([i,n],[0,s])),this.adjustOverDepth(e,n)},y.kind=u.MmlMover.prototype.kind,y.useIC=!0,y.styles={'mjx-mover:not([limits="false"])':{"padding-top":".1em"},'mjx-mover:not([limits="false"]) > *':{display:"block","text-align":"left"}},y);function y(){return null!==d&&d.apply(this,arguments)||this}e.CHTMLmover=m;var v,b=(v=l.CommonMunderoverMixin(o.CHTMLmsubsup),i(g,v),g.prototype.toCHTML=function(t){if(this.hasMovableLimits())return v.prototype.toCHTML.call(this,t),void this.adaptor.setAttribute(this.chtml,"limits","false");this.chtml=this.standardCHTMLnode(t);var e=this.adaptor.append(this.chtml,this.html("mjx-over")),r=this.adaptor.append(this.adaptor.append(this.chtml,this.html("mjx-box")),this.html("mjx-munder")),n=this.adaptor.append(this.adaptor.append(r,this.html("mjx-row")),this.html("mjx-base")),i=this.adaptor.append(this.adaptor.append(r,this.html("mjx-row")),this.html("mjx-under"));this.overChild.toCHTML(e),this.baseChild.toCHTML(n),this.underChild.toCHTML(i);var o=this.overChild.getBBox(),a=this.baseChild.getBBox(),s=this.underChild.getBBox(),c=p(this.getOverKU(a,o),2),l=c[0],u=(c[1],p(this.getUnderKV(a,s),2)),h=u[0],f=(u[1],this.getDelta());this.adaptor.setStyle(e,"paddingBottom",this.em(l)),this.adaptor.setStyle(i,"paddingTop",this.em(h)),this.setDeltaW([n,i,e],this.getDeltaW([a,s,o],[0,-f,f])),this.adjustOverDepth(e,o),this.adjustUnderDepth(i,s)},g.kind=u.MmlMunderover.prototype.kind,g.useIC=!0,g.styles={'mjx-munderover:not([limits="false"])':{"padding-top":".1em"},'mjx-munderover:not([limits="false"]) > *':{display:"block"}},g);function g(){return null!==v&&v.apply(this,arguments)||this}e.CHTMLmunderover=b},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),c=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0 mjx-row > mjx-cell":{"text-align":"right"}},h);function h(){return null!==o&&o.apply(this,arguments)||this}e.CHTMLmmultiscripts=u},function(t,s,e){"use strict";var n,r=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new 
r)}),d=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(s,"__esModule",{value:!0});var i=e(16);s.NextScript={base:"subList",subList:"supList",supList:"subList",psubList:"psupList",psupList:"psubList"},s.ScriptNames=["sup","sup","psup","psub"],s.CommonMmultiscriptsMixin=function(t){return r(e,c=t),e.prototype.combinePrePost=function(t,e){var r=new i.BBox(t);return r.combine(e,0,0),r},e.prototype.computeBBox=function(t,e){void 0===e&&(e=!1);var r=this.font.params.scriptspace,n=this.getScriptData(),i=this.combinePrePost(n.sub,n.psub),o=this.combinePrePost(n.sup,n.psup),a=d(this.getUVQ(n.base,i,o),2),s=a[0],c=a[1];if(t.empty(),n.numPrescripts&&(t.combine(n.psup,r,s),t.combine(n.psub,r,c)),t.append(n.base),n.numScripts){var l=t.w;t.combine(n.sup,l,s),t.combine(n.sub,l,c),t.w+=r}t.clean(),this.setChildPWidths(e)},e.prototype.getScriptData=function(){if(this.scriptData)return this.scriptData;var t=this.scriptData={base:null,sub:i.BBox.empty(),sup:i.BBox.empty(),psub:i.BBox.empty(),psup:i.BBox.empty(),numPrescripts:0,numScripts:0},e=this.getScriptBBoxLists();return this.combineBBoxLists(t.sub,t.sup,e.subList,e.supList),this.combineBBoxLists(t.psub,t.psup,e.psubList,e.psupList),this.scriptData.base=e.base[0],this.scriptData.numPrescripts=e.psubList.length,this.scriptData.numScripts=e.subList.length,this.scriptData},e.prototype.getScriptBBoxLists=function(){var e,t,r={base:[],subList:[],supList:[],psubList:[],psupList:[]},n="base";try{for(var i=l(this.childNodes),o=i.next();!o.done;o=i.next()){var a=o.value;n=a.node.isKind("mprescripts")?"psubList":(r[n].push(a.getBBox()),s.NextScript[n])}}catch(t){e={error:t}}finally{try{o&&!o.done&&(t=i.return)&&t.call(i)}finally{if(e)throw e.error}}return this.firstPrescript=r.subList.length+r.supList.length+2,this.padLists(r.subList,r.supList),this.padLists(r.psubList,r.psupList),r},e.prototype.padLists=function(t,e){t.length>e.length&&e.push(i.BBox.empty())},e.prototype.combineBBoxLists=function(t,e,r,n){for(var i=0;it.h&&(t.h=s),c>t.d&&(t.d=c),h>e.h&&(e.h=h),f>e.d&&(e.d=f)}},e.prototype.getScaledWHD=function(t){var e=t.w,r=t.h,n=t.d,i=t.rscale;return[e*i,r*i,n*i]},e.prototype.getUVQ=function(t,e,r){var n;if(!this.UVQ){var i=d([0,0,0],3),o=i[0],a=i[1],s=i[2];0===e.h&&0===e.d?o=this.getU(t,r):0===r.h&&0===r.d?o=-this.getV(t,e):(o=(n=d(c.prototype.getUVQ.call(this,t,e,r),3))[0],a=n[1],s=n[2]),this.UVQ=[o,a,s]}return this.UVQ},e;function e(){var t=null!==c&&c.apply(this,arguments)||this;return t.scriptData=null,t.firstPrescript=0,t}var c}},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),y=this&&this.__values||function(t){var e="function"==typeof Symbol&&Symbol.iterator,r=e&&t[e],n=0;if(r)return r.call(t);if(t&&"number"==typeof t.length)return{next:function(){return t&&n>=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},u=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return 
t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0 mjx-itable":{"vertical-align":"middle","text-align":"left","box-sizing":"border-box"},"mjx-labels > mjx-itable":{position:"absolute",top:0},'mjx-mtable[justify="left"]':{"text-align":"left"},'mjx-mtable[justify="right"]':{"text-align":"right"},'mjx-mtable[justify="left"][side="left"]':{"padding-right":"0 ! important"},'mjx-mtable[justify="left"][side="right"]':{"padding-left":"0 ! important"},'mjx-mtable[justify="right"][side="left"]':{"padding-right":"0 ! important"},'mjx-mtable[justify="right"][side="right"]':{"padding-left":"0 ! important"},"mjx-mtable[align]":{"vertical-align":"baseline"},'mjx-mtable[align="top"] > mjx-table':{"vertical-align":"top"},'mjx-mtable[align="bottom"] > mjx-table':{"vertical-align":"bottom"},'mjx-mtable[align="center"] > mjx-table':{"vertical-align":"middle"},'mjx-mtable[align="baseline"] > mjx-table':{"vertical-align":"middle"}},f);function f(t,e,r){void 0===r&&(r=null);var n=o.call(this,t,e,r)||this;return n.itable=n.html("mjx-itable"),n.labels=n.html("mjx-itable"),n}e.CHTMLmtable=l},function(t,e,r){"use strict";var n,o=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),y=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var s=r(16),v=r(10),b=r(104);e.CommonMtableMixin=function(t){return o(e,i=t),Object.defineProperty(e.prototype,"tableRows",{get:function(){return this.childNodes},enumerable:!0,configurable:!0}),e.prototype.findContainer=function(){for(var t=this,e=t.parent;e&&(e.node.notParent||e.node.isKind("mrow"));)e=(t=e).parent;this.container=e,this.containerI=t.node.childPosition()},e.prototype.getPercentageWidth=function(){if(this.hasLabels)this.bbox.pwidth=s.BBox.fullWidth;else{var t=this.node.attributes.get("width");v.isPercent(t)&&(this.bbox.pwidth=t)}},e.prototype.stretchRows=function(){for(var t=this.node.attributes.get("equalrows"),e=t?this.getEqualRowHeight():0,r=t?this.getTableData():{H:[0],D:[0]},n=r.H,i=r.D,o=this.tableRows,a=0;an[r]&&(n[r]=s),c>i[r]&&(i[r]=c),o&&l>o[e]&&(o[e]=l)},e.prototype.recordPWidthCell=function(t,e){t.childNodes[0]&&t.childNodes[0].getBBox().pwidth&&this.pwidthCells.push([t,e])},e.prototype.computeBBox=function(t,e){void 0===e&&(e=!1);var r,n,i=this.getTableData(),o=i.H,a=i.D;if(this.node.attributes.get("equalrows")){var s=this.getEqualRowHeight();r=b.sum([].concat(this.rLines,this.rSpace))+s*this.numRows}else r=b.sum(o.concat(a,this.rLines,this.rSpace));r+=2*(this.fLine+this.fSpace[1]);var c=this.getComputedWidths();n=b.sum(c.concat(this.cLines,this.cSpace))+2*(this.fLine+this.fSpace[0]);var l=this.node.attributes.get("width");"auto"!==l&&(n=Math.max(this.length2em(l,0)+2*this.fLine,n));var u=y(this.getBBoxHD(r),2),h=u[0],f=u[1];t.h=h,t.d=f,t.w=n;var p=y(this.getBBoxLR(),2),d=p[0],m=p[1];t.L=d,t.R=m,v.isPercent(l)||this.setColumnPWidths()},e.prototype.setChildPWidths=function(t,e,r){var n=this.node.attributes.get("width");if(v.isPercent(n)){this.hasLabels||(this.bbox.pwidth="",this.container.bbox.pwidth="");var 
i=this.bbox,o=i.w,a=i.L,s=i.R,c=Math.max(o,this.length2em(n,Math.max(e,a+o+s))),l=this.node.attributes.get("equalcolumns")?Array(this.numCols).fill(this.percent(1/Math.max(1,this.numCols))):this.getColumnAttributes("columnwidth",0);this.cWidths=this.getColumnWidthsFixed(l,c);var u=this.getComputedWidths();return this.pWidth=b.sum(u.concat(this.cLines,this.cSpace))+2*(this.fLine+this.fSpace[0]),this.isTop&&(this.bbox.w=this.pWidth),this.setColumnPWidths(),this.pWidth!==o&&this.parent.invalidateBBox(),this.pWidth!==o}},e.prototype.setColumnPWidths=function(){var e,t,r=this.cWidths;try{for(var n=x(this.pwidthCells),i=n.next();!i.done;i=n.next()){var o=y(i.value,2),a=o[0],s=o[1];a.setChildPWidths(!1,r[s])&&(a.invalidateBBox(),a.getBBox())}}catch(t){e={error:t}}finally{try{i&&!i.done&&(t=n.return)&&t.call(n)}finally{if(e)throw e.error}}},e.prototype.getBBoxHD=function(t){var e=y(this.getAlignmentRow(),2),r=e[0],n=e[1];if(null===n){var i=this.font.params.axis_height,o=t/2;return{top:[0,t],center:[o,o],bottom:[t,0],baseline:[o,o],axis:[o+i,o-i]}[r]||[o,o]}var a=this.getVerticalPosition(n,r);return[a,t-a]},e.prototype.getBBoxLR=function(){if(this.hasLabels){var t=this.node.attributes.get("side"),e=y(this.getPadAlignShift(t),3),r=e[0],n=e[1];return e[2],"center"===n?[r,r]:"left"===t?[r,0]:[0,r]}return[0,0]},e.prototype.getPadAlignShift=function(t){var e=this.getTableData().L+this.length2em(this.node.attributes.get("minlabelspacing")),r=y(null==this.styles?["",""]:[this.styles.get("padding-left"),this.styles.get("padding-right")],2),n=r[0],i=r[1];(n||i)&&(e=Math.max(e,this.length2em(n||"0"),this.length2em(i||"0")));var o=y(this.getAlignShift(),2),a=o[0],s=o[1];return a===t&&(s="left"===t?Math.max(e,s)-e:Math.min(-e,s)+e),[e,a,s]},e.prototype.getAlignShift=function(){return this.isTop?i.prototype.getAlignShift.call(this):[this.container.getChildAlign(this.containerI),0]},e.prototype.getWidth=function(){return this.pWidth||this.getBBox().w},e.prototype.getEqualRowHeight=function(){var t=this.getTableData(),e=t.H,r=t.D,n=Array.from(e.keys()).map(function(t){return e[t]+r[t]});return Math.max.apply(Math,n)},e.prototype.getComputedWidths=function(){var e=this,r=this.getTableData().W,t=Array.from(r.keys()).map(function(t){return"number"==typeof e.cWidths[t]?e.cWidths[t]:r[t]});return this.node.attributes.get("equalcolumns")&&(t=Array(t.length).fill(b.max(t))),t},e.prototype.getColumnWidths=function(){var t=this.node.attributes.get("width");if(this.node.attributes.get("equalcolumns"))return this.getEqualColumns(t);var e=this.getColumnAttributes("columnwidth",0);return"auto"===t?this.getColumnWidthsAuto(e):v.isPercent(t)?this.getColumnWidthsPercent(e,t):this.getColumnWidthsFixed(e,this.length2em(t))},e.prototype.getEqualColumns=function(t){var e,r=Math.max(1,this.numCols);if("auto"===t){var n=this.getTableData().W;e=b.max(n)}else if(v.isPercent(t))e=this.percent(1/r);else{var i=b.sum([].concat(this.cLines,this.cSpace))+2*this.fSpace[0];e=Math.max(0,this.length2em(t)-i)/r}return Array(this.numCols).fill(e)},e.prototype.getColumnWidthsAuto=function(t){var e=this;return t.map(function(t){return"auto"===t||"fit"===t?null:v.isPercent(t)?t:e.length2em(t)})},e.prototype.getColumnWidthsPercent=function(r,t){var n=this,i=0<=r.indexOf("fit"),o=(i?this.getTableData():{W:null}).W;return Array.from(r.keys()).map(function(t){var e=r[t];return"fit"===e?null:"auto"===e?i?o[t]:null:v.isPercent(e)?e:n.length2em(e)})},e.prototype.getColumnWidthsFixed=function(r,n){var 
i=this,t=Array.from(r.keys()),o=t.filter(function(t){return"fit"===r[t]}),e=t.filter(function(t){return"auto"===r[t]}),a=o.length||e.length,s=(a?this.getTableData():{W:null}).W,c=n-b.sum([].concat(this.cLines,this.cSpace))-2*this.fSpace[0],l=c;t.forEach(function(t){var e=r[t];l-="fit"===e||"auto"===e?s[t]:i.length2em(e,n)});var u=a&&0this.numRows?null:n-1]},e.prototype.getColumnAttributes=function(t,e){void 0===e&&(e=1);var r=this.numCols-e,n=this.getAttributeArray(t);if(0!==n.length){for(;n.lengthr&&n.splice(r),n}},e.prototype.getRowAttributes=function(t,e){void 0===e&&(e=1);var r=this.numRows-e,n=this.getAttributeArray(t);if(0!==n.length){for(;n.lengthr&&n.splice(r),n}},e.prototype.getAttributeArray=function(t){var e=this.node.attributes.get(t);return e?v.split(e):[this.node.attributes.getDefault(t)]},e.prototype.addEm=function(t,e){var r=this;if(void 0===e&&(e=1),t)return t.map(function(t){return r.em(t/e)})},e.prototype.convertLengths=function(t){var e=this;if(t)return t.map(function(t){return e.length2em(t)})},e;function e(){for(var t=[],e=0;e mjx-mtd':{"vertical-align":"top"},'mjx-mtr[rowalign="center"] > mjx-mtd':{"vertical-align":"middle"},'mjx-mtr[rowalign="bottom"] > mjx-mtd':{"vertical-align":"bottom"},'mjx-mtr[rowalign="baseline"] > mjx-mtd':{"vertical-align":"baseline"},'mjx-mtr[rowalign="axis"] > mjx-mtd':{"vertical-align":".25em"}},h);function h(){return null!==o&&o.apply(this,arguments)||this}e.CHTMLmtr=u;var f,p=(f=c.CommonMlabeledtrMixin(u),i(d,f),d.prototype.toCHTML=function(t){f.prototype.toCHTML.call(this,t);var e=this.adaptor.firstChild(this.chtml);if(e){this.adaptor.remove(e);var r=this.node.attributes.get("rowalign"),n="baseline"!==r&&"axis"!==r?{rowalign:r}:{},i=this.html("mjx-mtr",n,[e]);this.adaptor.append(this.parent.labels,i)}},d.kind=l.MmlMlabeledtr.prototype.kind,d.styles={"mjx-mlabeledtr":{display:"table-row"},'mjx-mlabeledtr[rowalign="top"] > mjx-mtd':{"vertical-align":"top"},'mjx-mlabeledtr[rowalign="center"] > mjx-mtd':{"vertical-align":"middle"},'mjx-mlabeledtr[rowalign="bottom"] > mjx-mtd':{"vertical-align":"bottom"},'mjx-mlabeledtr[rowalign="baseline"] > mjx-mtd':{"vertical-align":"baseline"},'mjx-mlabeledtr[rowalign="axis"] > mjx-mtd':{"vertical-align":".25em"}},d);function d(){return null!==f&&f.apply(this,arguments)||this}e.CHTMLmlabeledtr=p},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)});Object.defineProperty(e,"__esModule",{value:!0});var o,a=r(2),s=r(166),c=r(64),l=(o=s.CommonMtdMixin(a.CHTMLWrapper),i(u,o),u.prototype.toCHTML=function(t){o.prototype.toCHTML.call(this,t);var e=this.node.attributes.get("rowalign"),r=this.node.attributes.get("columnalign");e!==this.parent.node.attributes.get("rowalign")&&this.adaptor.setAttribute(this.chtml,"rowalign",e),"center"===r||"mlabeledtr"===this.parent.kind&&this===this.parent.childNodes[0]&&r===this.parent.parent.node.attributes.get("side")||this.adaptor.setStyle(this.chtml,"textAlign",r),this.adaptor.append(this.chtml,this.html("mjx-tstrut"))},u.kind=c.MmlMtd.prototype.kind,u.styles={"mjx-mtd":{display:"table-cell","text-align":"center",padding:".215em .4em"},"mjx-mtd:first-child":{"padding-left":0},"mjx-mtd:last-child":{"padding-right":0},"mjx-mtable > * > mjx-itable > *:first-child > 
mjx-mtd":{"padding-top":0},"mjx-mtable > * > mjx-itable > *:last-child > mjx-mtd":{"padding-bottom":0},"mjx-tstrut":{display:"inline-block",height:"1em","vertical-align":"-.25em"},'mjx-labels[align="left"] > mjx-mtr > mjx-mtd':{"text-align":"left"},'mjx-labels[align="right"] > mjx-mtr > mjx-mtd':{"text-align":"right"},'mjx-mtr mjx-mtd[rowalign="top"], mjx-mlabeledtr mjx-mtd[rowalign="top"]':{"vertical-align":"top"},'mjx-mtr mjx-mtd[rowalign="center"], mjx-mlabeledtr mjx-mtd[rowalign="center"]':{"vertical-align":"middle"},'mjx-mtr mjx-mtd[rowalign="bottom"], mjx-mlabeledtr mjx-mtd[rowalign="bottom"]':{"vertical-align":"bottom"},'mjx-mtr mjx-mtd[rowalign="baseline"], mjx-mlabeledtr mjx-mtd[rowalign="baseline"]':{"vertical-align":"baseline"},'mjx-mtr mjx-mtd[rowalign="axis"], mjx-mlabeledtr mjx-mtd[rowalign="axis"]':{"vertical-align":".25em"}},u);function u(){return null!==o&&o.apply(this,arguments)||this}e.CHTMLmtd=l},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)});Object.defineProperty(e,"__esModule",{value:!0}),e.CommonMtdMixin=function(t){return i(e,r=t),Object.defineProperty(e.prototype,"fixesPWidth",{get:function(){return!1},enumerable:!0,configurable:!0}),e.prototype.invalidateBBox=function(){this.bboxComputed=!1},e.prototype.getWrapWidth=function(t){var e=this.parent.parent,r=this.parent,n=this.node.childPosition()-(r.labeled?1:0);return"number"==typeof e.cWidths[n]?e.cWidths[n]:e.getTableData().W[n]},e.prototype.getChildAlign=function(t){return this.node.attributes.get("columnalign")},e;function e(){return null!==r&&r.apply(this,arguments)||this}var r}},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)});Object.defineProperty(e,"__esModule",{value:!0});var o,a=r(2),s=r(76),c=r(76),l=r(59),u=(o=s.CommonMactionMixin(a.CHTMLWrapper),i(h,o),h.prototype.toCHTML=function(t){var e=this.standardCHTMLnode(t);this.selected.toCHTML(e),this.action(this,this.data)},h.prototype.setEventHandler=function(t,e){this.chtml.addEventListener(t,e)},h.kind=l.MmlMaction.prototype.kind,h.styles={"mjx-maction":{position:"relative"},"mjx-maction > mjx-tool":{display:"none",position:"absolute",bottom:0,right:0,width:0,height:0,"z-index":500},"mjx-tool > mjx-tip":{display:"inline-block",padding:".2em",border:"1px solid #888","font-size":"70%","background-color":"#F8F8F8",color:"black","box-shadow":"2px 2px 5px #AAAAAA"},"mjx-maction[toggle]":{cursor:"pointer"},"mjx-status":{display:"block",position:"fixed",left:"1em",bottom:"1em","min-width":"25%",padding:".2em .4em",border:"1px solid #888","font-size":"90%","background-color":"#F8F8F8",color:"black"}},h.actions=new Map([["toggle",[function(t,e){t.adaptor.setAttribute(t.chtml,"toggle",t.node.attributes.get("selection"));var 
r=t.factory.jax.math,n=t.factory.jax.document,i=t.node;t.setEventHandler("click",function(t){r.start.node||(r.start.node=r.end.node=r.typesetRoot,r.start.n=r.end.n=0),i.nextToggleSelection(),r.rerender(n),t.stopPropagation()})},{}]],["tooltip",[function(r,n){var t=r.childNodes[1];if(t)if(t.node.isKind("mtext")){var e=t.node.getText();r.adaptor.setAttribute(r.chtml,"title",e)}else{var i=r.adaptor,o=i.append(r.chtml,r.html("mjx-tool",{style:{bottom:r.em(-r.dy),right:r.em(-r.dx)}},[r.html("mjx-tip")]));t.toCHTML(i.firstChild(o)),r.setEventHandler("mouseover",function(t){n.stopTimers(r,n);var e=setTimeout(function(){return i.setStyle(o,"display","block")},n.postDelay);n.hoverTimer.set(r,e),t.stopPropagation()}),r.setEventHandler("mouseout",function(t){n.stopTimers(r,n);var e=setTimeout(function(){return i.setStyle(o,"display","")},n.clearDelay);n.clearTimer.set(r,e),t.stopPropagation()})}},c.TooltipData]],["statusline",[function(r,n){var t=r.childNodes[1];if(t&&t.node.isKind("mtext")){var i=r.adaptor,o=t.node.getText();i.setAttribute(r.chtml,"statusline",o),r.setEventHandler("mouseover",function(t){if(null===n.status){var e=i.body(i.document);n.status=i.append(e,r.html("mjx-status",{},[r.text(o)]))}t.stopPropagation()}),r.setEventHandler("mouseout",function(t){n.status&&(i.remove(n.status),n.status=null),t.stopPropagation()})}},{status:null}]]]),h);function h(){return null!==o&&o.apply(this,arguments)||this}e.CHTMLmaction=u},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)});Object.defineProperty(e,"__esModule",{value:!0});var o,a=r(2),s=r(169),c=r(65),l=(o=s.CommonMglyphMixin(a.CHTMLWrapper),i(u,o),u.prototype.toCHTML=function(t){var e=this.standardCHTMLnode(t),r=this.node.attributes.getList("src","alt"),n=r.src,i=r.alt,o={width:this.em(this.width),height:this.em(this.height)};this.voffset&&(o.verticalAlign=this.em(-this.voffset));var a=this.html("img",{src:n,style:o,alt:i,title:i});this.adaptor.append(e,a)},u.kind=c.MmlMglyph.prototype.kind,u.styles={"mjx-mglyph > img":{display:"inline-block",border:0,padding:0}},u);function u(){return null!==o&&o.apply(this,arguments)||this}e.CHTMLmglyph=l},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),o=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var o,a=r(0),s=r(2),c=r(175),l=(o=c.CommonTextNodeMixin(s.CHTMLWrapper),i(u,o),u.prototype.toCHTML=function(t){var e,r;this.markUsed();var n=this.adaptor,i=this.parent.variant,o=this.node.getText();if("-explicitFont"===i){var a=this.jax.getFontData(this.parent.styles);n.append(t,this.jax.unknownText(o,i,a))}else{var s=this.parent.stretch.c,c=this.parent.remapChars(s?[s]:this.unicodeChars(o));try{for(var 
l=d(c),u=l.next();!u.done;u=l.next()){var h=u.value,f=this.getVariantChar(i,h)[3],p=f.unknown?this.jax.unknownText(String.fromCharCode(h),i):this.html("mjx-c",{class:this.char(h)});n.append(t,p),f.used=!0}}catch(t){e={error:t}}finally{try{u&&!u.done&&(r=l.return)&&r.call(l)}finally{if(e)throw e.error}}}},u.kind=a.TextNode.prototype.kind,u.autoStyle=!1,u.styles={"mjx-c":{display:"inline-block"},"mjx-utext":{display:"inline-block",padding:".75em 0 .25em 0"},"mjx-measure-text":{position:"absolute","font-family":"MJXZERO","white-space":"nowrap",height:"1px",width:"1px",overflow:"hidden"}},u);function u(){return null!==o&&o.apply(this,arguments)||this}e.CHTMLTextNode=l},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),g=this&&this.__values||function(t){var e="function"==typeof Symbol&&Symbol.iterator,r=e&&t[e],n=0;if(r)return r.call(t);if(t&&"number"==typeof t.length)return{next:function(){return t&&n>=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},M=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0t.h&&(t.h=l),u>t.d&&(t.d=u),t.ic=v.ic||0,t.sk=v.sk||0}}catch(t){r={error:t}}finally{try{d&&!d.done&&(n=p.return)&&n.call(p)}finally{if(r)throw r.error}}1"},63:{c:"?"},64:{c:"@"},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},91:{c:"["},93:{c:"]"},94:{c:"^"},95:{c:"_"},96:{c:"`"},97:{c:"a"},98:{c:"b"},99:{c:"c"},100:{c:"d"},101:{c:"e"},102:{c:"f"},103:{c:"g"},104:{c:"h"},105:{c:"i"},106:{c:"j"},107:{c:"k"},108:{c:"l"},109:{c:"m"},110:{c:"n"},111:{c:"o"},112:{c:"p"},113:{c:"q"},114:{c:"r"},115:{c:"s"},116:{c:"t"},117:{c:"u"},118:{c:"v"},119:{c:"w"},120:{c:"x"},121:{c:"y"},122:{c:"z"},123:{c:"{"},124:{c:"|"},125:{c:"}"},126:{c:"~"},183:{c:"\\22C5"},697:{c:"\\2032"},913:{c:"A"},914:{c:"B"},917:{c:"E"},918:{c:"Z"},919:{c:"H"},921:{c:"I"},922:{c:"K"},924:{c:"M"},925:{c:"N"},927:{c:"O"},929:{c:"P"},930:{c:"\\398"},932:{c:"T"},935:{c:"X"},978:{c:"\\3A5"},988:{c:"F"},8194:{c:""},8195:{c:""},8196:{c:""},8197:{c:""},8198:{c:""},8201:{c:""},8202:{c:""},8213:{c:"\\2014"},8214:{c:"\\2225"},8215:{c:"_"},8226:{c:"\\2219"},8243:{c:"\\2032\\2032"},8244:{c:"\\2032\\2032\\2032"},8254:{c:"\\2C9"},8260:{c:"/"},8279:{c:"\\2032\\2032\\2032\\2032"},8407:{c:"\\2192",f:"VB"},8465:{c:"I",f:"FR"},8476:{c:"R",f:"FR"},8602:{c:"\\2190\\338"},8603:{c:"\\2192\\338"},8622:{c:"\\2194\\338"},8653:{c:"\\21D0\\338"},8654:{c:"\\21D4\\338"},8655:{c:"\\21D2\\338"},8708:{c:"\\2203\\338"},8710:{c:"\\394"},8716:{c:"\\220B\\338"},8740:{c:"\\2223\\338"},8742:{c:"\\2225\\338"},8769:{c:"\\223C\\338"},8772:{c:"\\2243\\338"},8775:{c:"\\2245\\338"},8777:{c:"\\2248\\338"},8802:{c:"\\2261\\338"},8813:{c:"\\224D\\338"},8814:{c:"<\\338"},8815:{c:">\\338"},8816:{c:"\\2264\\338"},8817:{c:"\\2265\\338"},8832:{c:"\\227A\\338"},8833:{c:"\\227B\\338"},8836:{c:"\\2282\\338"},8837:{c:"\\2283\\338"},8840:{c:"\\2286\\338"},8841:{c:"\\2287\\
338"},8876:{c:"\\22A2\\338"},8877:{c:"\\22A8\\338"},8930:{c:"\\2291\\338"},8931:{c:"\\2292\\338"},9001:{c:"\\27E8"},9002:{c:"\\27E9"},9653:{c:"\\25B3"},9663:{c:"\\25BD"},10072:{c:"\\2223"},10744:{c:"/",f:"BI"},10799:{c:"\\D7"},12296:{c:"\\27E8"},12297:{c:"\\27E9"}})},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0}),e.bold={32:[0,0,.25],33:[.705,0,.35],34:[.694,-.329,.603],35:[.694,.193,.958],36:[.75,.056,.575],37:[.75,.056,.958],38:[.705,.011,.894],39:[.694,-.329,.319],40:[.75,.249,.447],41:[.75,.249,.447],42:[.75,-.306,.575],43:[.633,.131,.894],44:[.171,.194,.319],45:[.278,-.166,.383],46:[.171,0,.319],47:[.75,.25,.575],48:[.654,.01,.575],49:[.655,0,.575],50:[.654,0,.575],51:[.655,.011,.575],52:[.656,0,.575],53:[.655,.011,.575],54:[.655,.011,.575],55:[.676,.011,.575],56:[.654,.011,.575],57:[.654,.011,.575],58:[.444,0,.319],59:[.444,.194,.319],60:[.587,.085,.894],61:[.393,-.109,.894],62:[.587,.085,.894],63:[.7,0,.543],64:[.699,.006,.894],65:[.698,0,.869],66:[.686,0,.818],67:[.697,.011,.831],68:[.686,0,.882],69:[.68,0,.756],70:[.68,0,.724],71:[.697,.01,.904],72:[.686,0,.9],73:[.686,0,.436],74:[.686,.011,.594],75:[.686,0,.901],76:[.686,0,.692],77:[.686,0,1.092],78:[.686,0,.9],79:[.696,.01,.864],80:[.686,0,.786],81:[.696,.193,.864],82:[.686,.011,.862],83:[.697,.011,.639],84:[.675,0,.8],85:[.686,.011,.885],86:[.686,.007,.869],87:[.686,.007,1.189],88:[.686,0,.869],89:[.686,0,.869],90:[.686,0,.703],91:[.75,.25,.319],92:[.75,.25,.575],93:[.75,.25,.319],94:[.694,-.52,.575],95:[-.01,.061,.575],96:[.706,-.503,.575],97:[.453,.006,.559],98:[.694,.006,.639],99:[.453,.006,.511],100:[.694,.006,.639],101:[.452,.006,.527],102:[.7,0,.351,{ic:.101}],103:[.455,.201,.575],104:[.694,0,.639],105:[.695,0,.319],106:[.695,.2,.351],107:[.694,0,.607],108:[.694,0,.319],109:[.45,0,.958],110:[.45,0,.639],111:[.452,.005,.575],112:[.45,.194,.639],113:[.45,.194,.607],114:[.45,0,.474],115:[.453,.006,.454],116:[.635,.005,.447],117:[.45,.006,.639],118:[.444,0,.607],119:[.444,0,.831],120:[.444,0,.607],121:[.444,.2,.607],122:[.444,0,.511],123:[.75,.25,.575],124:[.75,.249,.319],125:[.75,.25,.575],126:[.344,-.202,.575],160:[0,0,.25],168:[.695,-.535,.575],172:[.371,-.061,.767],175:[.607,-.54,.575],176:[.702,-.536,.575],177:[.728,.035,.894],180:[.706,-.503,.575],183:[.336,-.166,.319],215:[.53,.028,.894],247:[.597,.096,.894],305:[.452,.008,.394,{sk:.0319}],567:[.451,.201,.439,{sk:.0958}],697:[.563,-.033,.344],710:[.694,-.52,.575],711:[.66,-.515,.575],713:[.607,-.54,.575],714:[.706,-.503,.575],715:[.706,-.503,.575],728:[.694,-.5,.575],729:[.695,-.525,.575],730:[.702,-.536,.575],732:[.694,-.552,.575],768:[.706,-.503,0],769:[.706,-.503,0],770:[.694,-.52,0],771:[.694,-.552,0],772:[.607,-.54,0],774:[.694,-.5,0],775:[.695,-.525,0],776:[.695,-.535,0],778:[.702,-.536,0],779:[.714,-.511,0],780:[.66,-.515,0],824:[.711,.21,0],913:[.698,0,.869],914:[.686,0,.818],915:[.68,0,.692],916:[.698,0,.958],917:[.68,0,.756],918:[.686,0,.703],919:[.686,0,.9],920:[.696,.01,.894],921:[.686,0,.436],922:[.686,0,.901],923:[.698,0,.806],924:[.686,0,1.092],925:[.686,0,.9],926:[.675,0,.767],927:[.696,.01,.864],928:[.68,0,.9],929:[.686,0,.786],930:[.696,.01,.894],931:[.686,0,.831],932:[.675,0,.8],933:[.697,0,.894],934:[.686,0,.831],935:[.686,0,.869],936:[.686,0,.894],937:[.696,0,.831],945:[.452,.008,.761,{sk:.0319}],946:[.701,.194,.66,{sk:.0958}],947:[.451,.211,.59],948:[.725,.008,.522,{sk:.0639}],949:[.461,.017,.529,{sk:.0958}],950:[.711,.202,.508,{sk:.0958}],951:[.452,.211,.6,{sk:.0639}],952:[.702,.008,.562,{sk:.0958}],953:[.452,.008,.4
12,{sk:.0639}],954:[.452,.008,.668],955:[.694,.013,.671],956:[.452,.211,.708,{sk:.0319}],957:[.452,0,.577,{sk:.0319}],958:[.711,.201,.508,{sk:.128}],959:[.452,.008,.585,{sk:.0639}],960:[.444,.008,.682],961:[.451,.211,.612,{sk:.0958}],962:[.451,.105,.424,{sk:.0958}],963:[.444,.008,.686],964:[.444,.013,.521,{ic:.089,sk:.0319}],965:[.453,.008,.631,{sk:.0319}],966:[.452,.216,.747,{sk:.0958}],967:[.452,.201,.718,{sk:.0639}],968:[.694,.202,.758,{sk:.128}],969:[.453,.008,.718],977:[.701,.008,.692,{sk:.0958}],978:[.697,0,.894],981:[.694,.202,.712,{sk:.0958}],982:[.444,.008,.975],988:[.68,0,.724],1009:[.451,.194,.612,{sk:.0958}],1013:[.444,.007,.483,{sk:.0639}],8194:[0,0,.5],8195:[0,0,.999],8196:[0,0,.333],8197:[0,0,.25],8198:[0,0,.167],8201:[0,0,.167],8202:[0,0,.083],8211:[.3,-.249,.575],8212:[.3,-.249,1.15],8213:[.3,-.249,1.15],8214:[.75,.248,.575],8215:[-.01,.061,.575],8216:[.694,-.329,.319],8217:[.694,-.329,.319],8220:[.694,-.329,.603],8221:[.694,-.329,.603],8224:[.702,.211,.511],8225:[.702,.202,.511],8226:[.474,-.028,.575],8230:[.171,0,1.295],8242:[.563,-.033,.344],8243:[.563,0,.688],8244:[.563,0,1.032],8254:[.607,-.54,.575],8260:[.75,.25,.575],8279:[.563,0,1.376],8407:[.723,-.513,.575],8463:[.694,.008,.668,{sk:-.0319}],8465:[.686,.026,.554],8467:[.702,.019,.474,{sk:.128}],8472:[.461,.21,.74],8476:[.686,.026,.828],8501:[.694,0,.703],8592:[.518,.017,1.15],8593:[.694,.193,.575],8594:[.518,.017,1.15],8595:[.694,.194,.575],8596:[.518,.017,1.15],8597:[.767,.267,.575],8598:[.724,.194,1.15],8599:[.724,.193,1.15],8600:[.694,.224,1.15],8601:[.694,.224,1.15],8602:[.711,.21,1.15],8603:[.711,.21,1.15],8614:[.518,.017,1.15],8617:[.518,.017,1.282],8618:[.518,.017,1.282],8622:[.711,.21,1.15],8636:[.518,-.22,1.15],8637:[.281,.017,1.15],8640:[.518,-.22,1.15],8641:[.281,.017,1.15],8652:[.718,.017,1.15],8653:[.711,.21,1.15],8654:[.711,.21,1.15],8655:[.711,.21,1.15],8656:[.547,.046,1.15],8657:[.694,.193,.703],8658:[.547,.046,1.15],8659:[.694,.194,.703],8660:[.547,.046,1.15],8661:[.767,.267,.703],8704:[.694,.016,.639],8706:[.71,.017,.628,{sk:.0958}],8707:[.694,0,.639],8708:[.711,.21,.639],8709:[.767,.073,.575],8710:[.698,0,.958],8711:[.686,.024,.958],8712:[.587,.086,.767],8713:[.711,.21,.767],8715:[.587,.086,.767],8716:[.711,.21,.767],8722:[.281,-.221,.894],8723:[.537,.227,.894],8725:[.75,.25,.575],8726:[.75,.25,.575],8727:[.472,-.028,.575],8728:[.474,-.028,.575],8729:[.474,-.028,.575],8730:[.82,.18,.958],8733:[.451,.008,.894],8734:[.452,.008,1.15],8736:[.714,0,.722],8739:[.75,.249,.319],8740:[.75,.249,.319],8741:[.75,.248,.575],8742:[.75,.248,.575],8743:[.604,.017,.767],8744:[.604,.016,.767],8745:[.603,.016,.767],8746:[.604,.016,.767],8747:[.711,.211,.569,{ic:.063}],8764:[.391,-.109,.894],8768:[.583,.082,.319],8769:[.711,.21,.894],8771:[.502,0,.894],8772:[.711,.21,.894],8773:[.638,.027,.894],8775:[.711,.21,.894],8776:[.524,-.032,.894],8777:[.711,.21,.894],8781:[.533,.032,.894],8784:[.721,-.109,.894],8800:[.711,.21,.894],8801:[.505,0,.894],8802:[.711,.21,.894],8804:[.697,.199,.894],8805:[.697,.199,.894],8810:[.617,.116,1.15],8811:[.618,.116,1.15],8813:[.711,.21,.894],8814:[.711,.21,.894],8815:[.711,.21,.894],8816:[.711,.21,.894],8817:[.711,.21,.894],8826:[.585,.086,.894],8827:[.586,.086,.894],8832:[.711,.21,.894],8833:[.711,.21,.894],8834:[.587,.085,.894],8835:[.587,.086,.894],8836:[.711,.21,.894],8837:[.711,.21,.894],8838:[.697,.199,.894],8839:[.697,.199,.894],8840:[.711,.21,.894],8841:[.711,.21,.894],8846:[.604,.016,.767],8849:[.697,.199,.894],8850:[.697,.199,.894],8851:[.604,0,.767],8852:[.604,0,.767],8853:[.632
,.132,.894],8854:[.632,.132,.894],8855:[.632,.132,.894],8856:[.632,.132,.894],8857:[.632,.132,.894],8866:[.693,0,.703],8867:[.693,0,.703],8868:[.694,0,.894],8869:[.693,0,.894],8872:[.75,.249,.974],8876:[.711,.21,.703],8877:[.75,.249,.974],8900:[.523,.021,.575],8901:[.336,-.166,.319],8902:[.502,0,.575],8904:[.54,.039,1],8930:[.711,.21,.894],8931:[.711,.21,.894],8942:[.951,.029,.319],8943:[.336,-.166,1.295],8945:[.871,-.101,1.323],8968:[.75,.248,.511],8969:[.75,.248,.511],8970:[.749,.248,.511],8971:[.749,.248,.511],8994:[.405,-.108,1.15],8995:[.392,-.126,1.15],9001:[.75,.249,.447],9002:[.75,.249,.447],9651:[.711,0,1.022],9653:[.711,0,1.022],9657:[.54,.039,.575],9661:[.5,.21,1.022],9663:[.5,.21,1.022],9667:[.539,.038,.575],9711:[.711,.211,1.15],9824:[.719,.129,.894],9825:[.711,.024,.894],9826:[.719,.154,.894],9827:[.719,.129,.894],9837:[.75,.017,.447],9838:[.741,.223,.447],9839:[.724,.224,.447],10072:[.75,.249,.319],10216:[.75,.249,.447],10217:[.75,.249,.447],10229:[.518,.017,1.805],10230:[.518,.017,1.833],10231:[.518,.017,2.126],10232:[.547,.046,1.868],10233:[.547,.046,1.87],10234:[.547,.046,2.126],10236:[.518,.017,1.833],10744:[.711,.21,.894],10799:[.53,.028,.894],10815:[.686,0,.9],10927:[.696,.199,.894],10928:[.697,.199,.894],12296:[.75,.249,.447],12297:[.75,.249,.447]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(181);e.doubleStruck=n.AddCSS(i.doubleStruck,{32:{c:" "},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},107:{c:"k"},913:{c:"A",f:"B"},914:{c:"B",f:"B"},917:{c:"E",f:"B"},918:{c:"Z",f:"B"},919:{c:"H",f:"B"},921:{c:"I",f:"B"},922:{c:"K",f:"B"},924:{c:"M",f:"B"},925:{c:"N",f:"B"},927:{c:"O",f:"B"},929:{c:"P",f:"B"},930:{c:"\\398",f:"B"},932:{c:"T",f:"B"},935:{c:"X",f:"B"},978:{c:"\\3A5",f:"B"},988:{c:"F",f:"B"},8450:{c:"C",f:"A"},8461:{c:"H",f:"A"},8469:{c:"N",f:"A"},8473:{c:"P",f:"A"},8474:{c:"Q",f:"A"},8477:{c:"R",f:"A"},8484:{c:"Z",f:"A"}})},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0}),e.doubleStruck={32:[0,0,.25],65:[.701,0,.722],66:[.683,0,.667],67:[.702,.019,.722],68:[.683,0,.722],69:[.683,0,.667],70:[.683,0,.611],71:[.702,.019,.778],72:[.683,0,.778],73:[.683,0,.389],74:[.683,.077,.5],75:[.683,0,.778],76:[.683,0,.667],77:[.683,0,.944],78:[.683,.02,.722],79:[.701,.019,.778],80:[.683,0,.611],81:[.701,.181,.778],82:[.683,0,.722],83:[.702,.012,.556],84:[.683,0,.667],85:[.683,.019,.722],86:[.683,.02,.722],87:[.683,.019,1],88:[.683,0,.722],89:[.683,0,.722],90:[.683,0,.667],107:[.683,0,.556],160:[0,0,.25],913:[.698,0,.869],914:[.686,0,.818],917:[.68,0,.756],918:[.686,0,.703],919:[.686,0,.9],921:[.686,0,.436],922:[.686,0,.901],924:[.686,0,1.092],925:[.686,0,.9],927:[.696,.01,.864],929:[.686,0,.786],930:[.696,.01,.894],932:[.675,0,.8],935:[.686,0,.869],978:[.697,0,.894],988:[.68,0,.724],8450:[.702,.019,.722],8461:[.683,0,.778],8469:[.683,.02,.722],8473:[.683,0,.611],8474:[.701,.181,.778],8477:[.683,0,.722],8484:[.683,0,.667]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(183);e.frakturBold=n.AddCSS(i.frakturBold,{32:{c:" 
"},33:{c:"!"},38:{c:"&"},40:{c:"("},41:{c:")"},42:{c:"*"},43:{c:"+"},44:{c:","},45:{c:"-"},46:{c:"."},47:{c:"/"},48:{c:"0"},49:{c:"1"},50:{c:"2"},51:{c:"3"},52:{c:"4"},53:{c:"5"},54:{c:"6"},55:{c:"7"},56:{c:"8"},57:{c:"9"},58:{c:":"},59:{c:";"},61:{c:"="},63:{c:"?"},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},91:{c:"["},93:{c:"]"},94:{c:"^"},97:{c:"a"},98:{c:"b"},99:{c:"c"},100:{c:"d"},101:{c:"e"},102:{c:"f"},103:{c:"g"},104:{c:"h"},105:{c:"i"},106:{c:"j"},107:{c:"k"},108:{c:"l"},109:{c:"m"},110:{c:"n"},111:{c:"o"},112:{c:"p"},113:{c:"q"},114:{c:"r"},115:{c:"s"},116:{c:"t"},117:{c:"u"},118:{c:"v"},119:{c:"w"},120:{c:"x"},121:{c:"y"},122:{c:"z"},913:{c:"A",f:"B"},914:{c:"B",f:"B"},917:{c:"E",f:"B"},918:{c:"Z",f:"B"},919:{c:"H",f:"B"},921:{c:"I",f:"B"},922:{c:"K",f:"B"},924:{c:"M",f:"B"},925:{c:"N",f:"B"},927:{c:"O",f:"B"},929:{c:"P",f:"B"},930:{c:"\\398",f:"B"},932:{c:"T",f:"B"},935:{c:"X",f:"B"},978:{c:"\\3A5",f:"B"},988:{c:"F",f:"B"},8260:{c:"/"}})},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0}),e.frakturBold={32:[0,0,.25],33:[.689,.012,.349],34:[.695,-.432,.254],38:[.696,.016,.871],39:[.695,-.436,.25],40:[.737,.186,.459],41:[.735,.187,.459],42:[.692,-.449,.328],43:[.598,.082,.893],44:[.107,.191,.328],45:[.275,-.236,.893],46:[.102,.015,.328],47:[.721,.182,.593],48:[.501,.012,.593],49:[.489,0,.593],50:[.491,0,.593],51:[.487,.193,.593],52:[.495,.196,.593],53:[.481,.19,.593],54:[.704,.012,.593],55:[.479,.197,.593],56:[.714,.005,.593],57:[.487,.195,.593],58:[.457,.012,.255],59:[.458,.19,.255],61:[.343,-.168,.582],63:[.697,.014,.428],65:[.686,.031,.847],66:[.684,.031,1.044],67:[.676,.032,.723],68:[.683,.029,.982],69:[.686,.029,.783],70:[.684,.146,.722],71:[.687,.029,.927],72:[.683,.126,.851],73:[.681,.025,.655],74:[.68,.141,.652],75:[.681,.026,.789],76:[.683,.028,.786],77:[.683,.032,1.239],78:[.679,.03,.983],79:[.726,.03,.976],80:[.688,.223,.977],81:[.726,.083,.976],82:[.688,.028,.978],83:[.685,.031,.978],84:[.686,.03,.79],85:[.688,.039,.851],86:[.685,.029,.982],87:[.683,.03,1.235],88:[.681,.035,.849],89:[.688,.214,.984],90:[.677,.148,.711],91:[.74,.13,.257],93:[.738,.132,.257],94:[.734,-.452,.59],97:[.472,.032,.603],98:[.69,.032,.59],99:[.473,.026,.464],100:[.632,.028,.589],101:[.471,.027,.472],102:[.687,.222,.388],103:[.472,.208,.595],104:[.687,.207,.615],105:[.686,.025,.331],106:[.682,.203,.332],107:[.682,.025,.464],108:[.681,.024,.337],109:[.476,.031,.921],110:[.473,.028,.654],111:[.482,.034,.609],112:[.557,.207,.604],113:[.485,.211,.596],114:[.472,.026,.46],115:[.479,.034,.523],116:[.648,.027,.393],117:[.472,.032,.589],118:[.546,.027,.604],119:[.549,.032,.918],120:[.471,.188,.459],121:[.557,.221,.589],122:[.471,.214,.461],160:[0,0,.25],913:[.698,0,.869],914:[.686,0,.818],917:[.68,0,.756],918:[.686,0,.703],919:[.686,0,.9],921:[.686,0,.436],922:[.686,0,.901],924:[.686,0,1.092],925:[.686,0,.9],927:[.696,.01,.864],929:[.686,0,.786],930:[.696,.01,.894],932:[.675,0,.8],935:[.686,0,.869],978:[.697,0,.894],988:[.68,0,.724],8216:[.708,-.411,.254],8217:[.692,-.394,.254],8260:[.721,.182,.593],58113:[.63,.027,.587],58114:[.693,.212,.394],58115:[.681,.219,.387],58116:[.473,.212,.593],58117:[.684,.027,.393],58120:[.679,.22,.981],58121:[.717,.137,.727]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var 
n=r(1),i=r(185);e.fraktur=n.AddCSS(i.fraktur,{32:{c:" "},33:{c:"!"},38:{c:"&"},40:{c:"("},41:{c:")"},42:{c:"*"},43:{c:"+"},44:{c:","},45:{c:"-"},46:{c:"."},47:{c:"/"},48:{c:"0"},49:{c:"1"},50:{c:"2"},51:{c:"3"},52:{c:"4"},53:{c:"5"},54:{c:"6"},55:{c:"7"},56:{c:"8"},57:{c:"9"},58:{c:":"},59:{c:";"},61:{c:"="},63:{c:"?"},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},91:{c:"["},93:{c:"]"},94:{c:"^"},97:{c:"a"},98:{c:"b"},99:{c:"c"},100:{c:"d"},101:{c:"e"},102:{c:"f"},103:{c:"g"},104:{c:"h"},105:{c:"i"},106:{c:"j"},107:{c:"k"},108:{c:"l"},109:{c:"m"},110:{c:"n"},111:{c:"o"},112:{c:"p"},113:{c:"q"},114:{c:"r"},115:{c:"s"},116:{c:"t"},117:{c:"u"},118:{c:"v"},119:{c:"w"},120:{c:"x"},121:{c:"y"},122:{c:"z"},913:{c:"A",f:""},914:{c:"B",f:""},917:{c:"E",f:""},918:{c:"Z",f:""},919:{c:"H",f:""},921:{c:"I",f:""},922:{c:"K",f:""},924:{c:"M",f:""},925:{c:"N",f:""},927:{c:"O",f:""},929:{c:"P",f:""},930:{c:"\\398",f:""},932:{c:"T",f:""},935:{c:"X",f:""},978:{c:"\\3A5",f:""},988:{c:"F",f:""},8260:{c:"/"},8460:{c:"H",f:"FR"},8465:{c:"I",f:"FR"},8476:{c:"R",f:"FR"},8488:{c:"Z",f:"FR"},8493:{c:"C",f:"FR"}})},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0}),e.fraktur={32:[0,0,.25],33:[.689,.012,.296],34:[.695,-.432,.215],38:[.698,.011,.738],39:[.695,-.436,.212],40:[.737,.186,.389],41:[.735,.187,.389],42:[.692,-.449,.278],43:[.598,.082,.756],44:[.107,.191,.278],45:[.275,-.236,.756],46:[.102,.015,.278],47:[.721,.182,.502],48:[.492,.013,.502],49:[.468,0,.502],50:[.474,0,.502],51:[.473,.182,.502],52:[.476,.191,.502],53:[.458,.184,.502],54:[.7,.013,.502],55:[.468,.181,.502],56:[.705,.01,.502],57:[.469,.182,.502],58:[.457,.012,.216],59:[.458,.189,.216],61:[.368,-.132,.756],63:[.693,.011,.362],65:[.696,.026,.718],66:[.691,.027,.884],67:[.685,.024,.613],68:[.685,.027,.832],69:[.685,.024,.663],70:[.686,.153,.611],71:[.69,.026,.785],72:[.666,.133,.72],73:[.686,.026,.554],74:[.686,.139,.552],75:[.68,.027,.668],76:[.686,.026,.666],77:[.692,.027,1.05],78:[.686,.025,.832],79:[.729,.027,.827],80:[.692,.218,.828],81:[.729,.069,.827],82:[.686,.026,.828],83:[.692,.027,.829],84:[.701,.027,.669],85:[.697,.027,.646],86:[.686,.026,.831],87:[.686,.027,1.046],88:[.688,.027,.719],89:[.686,.218,.833],90:[.729,.139,.602],91:[.74,.13,.278],93:[.738,.131,.278],94:[.734,-.452,.5],97:[.47,.035,.5],98:[.685,.031,.513],99:[.466,.029,.389],100:[.609,.033,.499],101:[.467,.03,.401],102:[.681,.221,.326],103:[.47,.209,.504],104:[.688,.205,.521],105:[.673,.02,.279],106:[.672,.208,.281],107:[.689,.025,.389],108:[.685,.02,.28],109:[.475,.026,.767],110:[.475,.022,.527],111:[.48,.028,.489],112:[.541,.212,.5],113:[.479,.219,.489],114:[.474,.021,.389],115:[.478,.029,.443],116:[.64,.02,.333],117:[.474,.023,.517],118:[.53,.028,.512],119:[.532,.028,.774],120:[.472,.188,.389],121:[.528,.218,.499],122:[.471,.214,.391],160:[0,0,.25],913:[.716,0,.75],914:[.683,0,.708],917:[.68,0,.681],918:[.683,0,.611],919:[.683,0,.75],921:[.683,0,.361],922:[.683,0,.778],924:[.683,0,.917],925:[.683,0,.75],927:[.705,.022,.778],929:[.683,0,.681],930:[.705,.022,.778],932:[.677,0,.722],935:[.683,0,.75],978:[.705,0,.778],988:[.68,0,.653],8216:[.708,-.41,.215],8217:[.692,-.395,.215],8260:[.721,.182,.502],8460:[.666,.133,.72],8465:[.686,.026,.554],8476:[.686,.026,.828],8488:[.729,.139,.602],8493:[.685,.024,.613],58112:[.68
3,.032,.497],58113:[.616,.03,.498],58114:[.68,.215,.333],58115:[.679,.224,.329],58116:[.471,.214,.503],58117:[.686,.02,.333],58118:[.577,.021,.334],58119:[.475,.022,.501]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(187);e.italic=n.AddCSS(i.italic,{32:{c:" "},33:{c:"!"},35:{c:"#"},37:{c:"%"},38:{c:"&"},40:{c:"("},41:{c:")"},42:{c:"*"},43:{c:"+"},44:{c:","},45:{c:"-"},46:{c:"."},47:{c:"/"},48:{c:"0"},49:{c:"1"},50:{c:"2"},51:{c:"3"},52:{c:"4"},53:{c:"5"},54:{c:"6"},55:{c:"7"},56:{c:"8"},57:{c:"9"},58:{c:":"},59:{c:";"},61:{c:"="},63:{c:"?"},64:{c:"@"},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},91:{c:"["},93:{c:"]"},94:{c:"^"},95:{c:"_"},97:{c:"a"},98:{c:"b"},99:{c:"c"},100:{c:"d"},101:{c:"e"},102:{c:"f"},103:{c:"g"},104:{c:"h"},105:{c:"i"},106:{c:"j"},107:{c:"k"},108:{c:"l"},109:{c:"m"},110:{c:"n"},111:{c:"o"},112:{c:"p"},113:{c:"q"},114:{c:"r"},115:{c:"s"},116:{c:"t"},117:{c:"u"},118:{c:"v"},119:{c:"w"},120:{c:"x"},121:{c:"y"},122:{c:"z"},126:{c:"~"},913:{c:"A"},914:{c:"B"},917:{c:"E"},918:{c:"Z"},919:{c:"H"},921:{c:"I"},922:{c:"K"},924:{c:"M"},925:{c:"N"},927:{c:"O"},929:{c:"P"},930:{c:"\\398"},932:{c:"T"},935:{c:"X"},978:{c:"\\3A5"},988:{c:"F"},8213:{c:"\\2014"},8215:{c:"_"},8260:{c:"/"},8462:{c:"h",f:"I"},8710:{c:"\\394"},10744:{c:"/",f:"I"}})},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0}),e.italic={32:[0,0,.25],33:[.716,0,.307,{ic:.073}],34:[.694,-.379,.514],35:[.694,.194,.818],37:[.75,.056,.818],38:[.716,.022,.767],39:[.694,-.379,.307,{ic:.07}],40:[.75,.25,.409,{ic:.108}],41:[.75,.25,.409],42:[.75,-.32,.511,{ic:.073}],43:[.557,.057,.767],44:[.121,.194,.307],45:[.251,-.18,.358],46:[.121,0,.307],47:[.716,.215,.778],48:[.665,.021,.511,{ic:.051}],49:[.666,0,.511],50:[.666,.022,.511],51:[.666,.022,.511,{ic:.051}],52:[.666,.194,.511],53:[.666,.022,.511,{ic:.056}],54:[.665,.022,.511,{ic:.054}],55:[.666,.022,.511,{ic:.123}],56:[.666,.021,.511],57:[.666,.022,.511],58:[.431,0,.307],59:[.431,.194,.307],61:[.367,-.133,.767],63:[.716,0,.511],64:[.705,.011,.767],65:[.716,0,.75,{sk:.139}],66:[.683,0,.759,{sk:.0833}],67:[.705,.022,.715,{sk:.0833}],68:[.683,0,.828,{sk:.0556}],69:[.68,0,.738,{sk:.0833}],70:[.68,0,.643,{ic:.106,sk:.0833}],71:[.705,.022,.786,{sk:.0833}],72:[.683,0,.831,{ic:.057,sk:.0556}],73:[.683,0,.44,{ic:.064,sk:.111}],74:[.683,.022,.555,{ic:.078,sk:.167}],75:[.683,0,.849,{sk:.0556}],76:[.683,0,.681,{sk:.0278}],77:[.683,0,.97,{ic:.081,sk:.0833}],78:[.683,0,.803,{ic:.085,sk:.0833}],79:[.704,.022,.763,{sk:.0833}],80:[.683,0,.642,{ic:.109,sk:.0833}],81:[.704,.194,.791,{sk:.0833}],82:[.683,.021,.759,{sk:.0833}],83:[.705,.022,.613,{sk:.0833}],84:[.677,0,.584,{ic:.12,sk:.0833}],85:[.683,.022,.683,{ic:.084,sk:.0278}],86:[.683,.022,.583,{ic:.186}],87:[.683,.022,.944,{ic:.104}],88:[.683,0,.828,{sk:.0833}],89:[.683,0,.581,{ic:.182}],90:[.683,0,.683,{sk:.0833}],91:[.75,.25,.307,{ic:.139}],93:[.75,.25,.307,{ic:.052}],94:[.694,-.527,.511],95:[-.025,.062,.511],97:[.441,.01,.529],98:[.694,.011,.429],99:[.442,.011,.433,{sk:.0556}],100:[.694,.01,.52,{sk:.167}],101:[.442,.011,.466,{sk:.0556}],102:[.705,.205,.49,{ic:.06,sk:.167}],103:[.442,.205,.477,{sk:.0278}],104:[.694,.011,.576,{sk:-.0278}],105:[.661,.011,.345],106:[.661,.204,.412],107:[.694,.011,.521],108:[.694,.011,.298,{sk:.0833}
],109:[.442,.011,.878],110:[.442,.011,.6],111:[.441,.011,.485,{sk:.0556}],112:[.442,.194,.503,{sk:.0833}],113:[.442,.194,.446,{sk:.0833}],114:[.442,.011,.451,{sk:.0556}],115:[.442,.01,.469,{sk:.0556}],116:[.626,.011,.361,{sk:.0833}],117:[.442,.011,.572,{sk:.0278}],118:[.443,.011,.485,{sk:.0278}],119:[.443,.011,.716,{sk:.0833}],120:[.442,.011,.572,{sk:.0278}],121:[.442,.205,.49,{sk:.0556}],122:[.442,.011,.465,{sk:.0556}],126:[.318,-.208,.511,{ic:.06}],160:[0,0,.25],163:[.714,.011,.769],305:[.441,.01,.307],567:[.442,.204,.332],768:[.697,-.5,0],769:[.697,-.5,0],770:[.694,-.527,0],771:[.668,-.558,0,{ic:.06}],772:[.589,-.544,0,{ic:.054}],774:[.694,-.515,0,{ic:.062}],775:[.669,-.548,0],776:[.669,-.554,0],778:[.716,-.542,0],779:[.697,-.503,0,{ic:.065}],780:[.638,-.502,0],913:[.716,0,.75,{sk:.139}],914:[.683,0,.759,{sk:.0833}],915:[.68,0,.615,{ic:.106,sk:.0833}],916:[.716,0,.833,{sk:.167}],917:[.68,0,.738,{sk:.0833}],918:[.683,0,.683,{sk:.0833}],919:[.683,0,.831,{ic:.057,sk:.0556}],920:[.704,.022,.763,{sk:.0833}],921:[.683,0,.44,{ic:.064,sk:.111}],922:[.683,0,.849,{sk:.0556}],923:[.716,0,.694,{sk:.167}],924:[.683,0,.97,{ic:.081,sk:.0833}],925:[.683,0,.803,{ic:.085,sk:.0833}],926:[.677,0,.742,{sk:.0833}],927:[.704,.022,.763,{sk:.0833}],928:[.68,0,.831,{ic:.056,sk:.0556}],929:[.683,0,.642,{ic:.109,sk:.0833}],930:[.704,.022,.763,{sk:.0833}],931:[.683,0,.78,{sk:.0833}],932:[.677,0,.584,{ic:.12,sk:.0833}],933:[.705,0,.583,{ic:.117,sk:.0556}],934:[.683,0,.667,{sk:.0833}],935:[.683,0,.828,{sk:.0833}],936:[.683,0,.612,{ic:.08,sk:.0556}],937:[.704,0,.772,{sk:.0833}],945:[.442,.011,.64,{sk:.0278}],946:[.705,.194,.566,{sk:.0833}],947:[.441,.216,.518],948:[.717,.01,.444,{sk:.0556}],949:[.452,.022,.466,{sk:.0833}],950:[.704,.204,.438,{sk:.0833}],951:[.442,.216,.497,{sk:.0556}],952:[.705,.01,.469,{sk:.0833}],953:[.442,.01,.354,{sk:.0556}],954:[.442,.011,.576],955:[.694,.012,.583],956:[.442,.216,.603,{sk:.0278}],957:[.442,0,.494,{sk:.0278}],958:[.704,.205,.438,{sk:.111}],959:[.441,.011,.485,{sk:.0556}],960:[.431,.011,.57],961:[.442,.216,.517,{sk:.0833}],962:[.442,.107,.363,{sk:.0833}],963:[.431,.011,.571],964:[.431,.013,.437,{ic:.08,sk:.0278}],965:[.443,.01,.54,{sk:.0278}],966:[.442,.218,.654,{sk:.0833}],967:[.442,.204,.626,{sk:.0556}],968:[.694,.205,.651,{sk:.111}],969:[.443,.011,.622],977:[.705,.011,.591,{sk:.0833}],978:[.705,0,.583,{ic:.117,sk:.0556}],981:[.694,.205,.596,{sk:.0833}],982:[.431,.01,.828],988:[.68,0,.643,{ic:.106,sk:.0833}],1009:[.442,.194,.517,{sk:.0833}],1013:[.431,.011,.406,{sk:.0556}],8211:[.285,-.248,.511],8212:[.285,-.248,1.022],8213:[.285,-.248,1.022],8215:[-.025,.062,.511],8216:[.694,-.379,.307,{ic:.055}],8217:[.694,-.379,.307,{ic:.07}],8220:[.694,-.379,.514,{ic:.092}],8221:[.694,-.379,.514],8260:[.716,.215,.778],8462:[.694,.011,.576,{sk:-.0278}],8463:[.695,.013,.54],8710:[.716,0,.833,{sk:.167}],10744:[.716,.215,.778]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(189);e.largeop=n.AddCSS(i.largeop,{32:{c:" "},40:{c:"("},41:{c:")"},47:{c:"/"},91:{c:"["},93:{c:"]"},123:{c:"{"},125:{c:"}"},8260:{c:"/"},9001:{c:"\\27E8"},9002:{c:"\\27E9"},10072:{c:"\\2223"},10764:{c:"\\222C\\222C"},12296:{c:"\\27E8"},12297:{c:"\\27E9"}})},function(t,e,r){"use 
strict";Object.defineProperty(e,"__esModule",{value:!0}),e.largeop={32:[0,0,.25],40:[1.15,.649,.597],41:[1.15,.649,.597],47:[1.15,.649,.811],91:[1.15,.649,.472],92:[1.15,.649,.811],93:[1.15,.649,.472],123:[1.15,.649,.667],125:[1.15,.649,.667],160:[0,0,.25],710:[.772,-.565,1],732:[.75,-.611,1],770:[.772,-.565,0],771:[.75,-.611,0],8214:[.602,0,.778],8260:[1.15,.649,.811],8593:[.6,0,.667],8595:[.6,0,.667],8657:[.599,0,.778],8659:[.6,0,.778],8719:[.95,.45,1.278],8720:[.95,.45,1.278],8721:[.95,.45,1.444],8730:[1.15,.65,1],8739:[.627,.015,.333],8741:[.627,.015,.556],8747:[1.36,.862,.556,{ic:.388}],8748:[1.36,.862,1.084,{ic:.388}],8749:[1.36,.862,1.592,{ic:.388}],8750:[1.36,.862,.556,{ic:.388}],8896:[.95,.45,1.111],8897:[.95,.45,1.111],8898:[.949,.45,1.111],8899:[.95,.449,1.111],8968:[1.15,.649,.528],8969:[1.15,.649,.528],8970:[1.15,.649,.528],8971:[1.15,.649,.528],9001:[1.15,.649,.611],9002:[1.15,.649,.611],9168:[.602,0,.667],10072:[.627,.015,.333],10216:[1.15,.649,.611],10217:[1.15,.649,.611],10752:[.949,.449,1.511],10753:[.949,.449,1.511],10754:[.949,.449,1.511],10756:[.95,.449,1.111],10758:[.95,.45,1.111],10764:[1.36,.862,2.168,{ic:.388}],12296:[1.15,.649,.611],12297:[1.15,.649,.611]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(191);e.monospace=n.AddCSS(i.monospace,{32:{c:" "},33:{c:"!"},35:{c:"#"},36:{c:"$"},37:{c:"%"},38:{c:"&"},40:{c:"("},41:{c:")"},42:{c:"*"},43:{c:"+"},44:{c:","},45:{c:"-"},46:{c:"."},47:{c:"/"},48:{c:"0"},49:{c:"1"},50:{c:"2"},51:{c:"3"},52:{c:"4"},53:{c:"5"},54:{c:"6"},55:{c:"7"},56:{c:"8"},57:{c:"9"},58:{c:":"},59:{c:";"},60:{c:"<"},61:{c:"="},62:{c:">"},63:{c:"?"},64:{c:"@"},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},91:{c:"["},93:{c:"]"},94:{c:"^"},95:{c:"_"},96:{c:"`"},97:{c:"a"},98:{c:"b"},99:{c:"c"},100:{c:"d"},101:{c:"e"},102:{c:"f"},103:{c:"g"},104:{c:"h"},105:{c:"i"},106:{c:"j"},107:{c:"k"},108:{c:"l"},109:{c:"m"},110:{c:"n"},111:{c:"o"},112:{c:"p"},113:{c:"q"},114:{c:"r"},115:{c:"s"},116:{c:"t"},117:{c:"u"},118:{c:"v"},119:{c:"w"},120:{c:"x"},121:{c:"y"},122:{c:"z"},123:{c:"{"},124:{c:"|"},125:{c:"}"},126:{c:"~"},697:{c:"\\2032"},913:{c:"A"},914:{c:"B"},917:{c:"E"},918:{c:"Z"},919:{c:"H"},921:{c:"I"},922:{c:"K"},924:{c:"M"},925:{c:"N"},927:{c:"O"},929:{c:"P"},930:{c:"\\398"},932:{c:"T"},935:{c:"X"},978:{c:"\\3A5"},988:{c:"F"},8215:{c:"_"},8243:{c:"\\2032\\2032"},8244:{c:"\\2032\\2032\\2032"},8260:{c:"/"},8279:{c:"\\2032\\2032\\2032\\2032"},8710:{c:"\\394"}})},function(t,e,r){"use 
strict";Object.defineProperty(e,"__esModule",{value:!0}),e.monospace={32:[0,0,.525],33:[.622,0,.525],34:[.623,-.333,.525],35:[.611,0,.525],36:[.694,.082,.525],37:[.694,.083,.525],38:[.622,.011,.525],39:[.611,-.287,.525],40:[.694,.082,.525],41:[.694,.082,.525],42:[.52,-.09,.525],43:[.531,-.081,.525],44:[.14,.139,.525],45:[.341,-.271,.525],46:[.14,0,.525],47:[.694,.083,.525],48:[.621,.01,.525],49:[.622,0,.525],50:[.622,0,.525],51:[.622,.011,.525],52:[.624,0,.525],53:[.611,.01,.525],54:[.622,.011,.525],55:[.627,.01,.525],56:[.621,.01,.525],57:[.622,.011,.525],58:[.431,0,.525],59:[.431,.139,.525],60:[.557,-.055,.525],61:[.417,-.195,.525],62:[.557,-.055,.525],63:[.617,0,.525],64:[.617,.006,.525],65:[.623,0,.525],66:[.611,0,.525],67:[.622,.011,.525],68:[.611,0,.525],69:[.611,0,.525],70:[.611,0,.525],71:[.622,.011,.525],72:[.611,0,.525],73:[.611,0,.525],74:[.611,.011,.525],75:[.611,0,.525],76:[.611,0,.525],77:[.611,0,.525],78:[.611,0,.525],79:[.621,.01,.525],80:[.611,0,.525],81:[.621,.138,.525],82:[.611,.011,.525],83:[.622,.011,.525],84:[.611,0,.525],85:[.611,.011,.525],86:[.611,.007,.525],87:[.611,.007,.525],88:[.611,0,.525],89:[.611,0,.525],90:[.611,0,.525],91:[.694,.082,.525],92:[.694,.083,.525],93:[.694,.082,.525],94:[.611,-.46,.525],95:[-.025,.095,.525],96:[.681,-.357,.525],97:[.439,.006,.525],98:[.611,.006,.525],99:[.44,.006,.525],100:[.611,.006,.525],101:[.44,.006,.525],102:[.617,0,.525],103:[.442,.229,.525],104:[.611,0,.525],105:[.612,0,.525],106:[.612,.228,.525],107:[.611,0,.525],108:[.611,0,.525],109:[.436,0,.525],110:[.436,0,.525],111:[.44,.006,.525],112:[.437,.221,.525],113:[.437,.221,.525],114:[.437,0,.525],115:[.44,.006,.525],116:[.554,.006,.525],117:[.431,.005,.525],118:[.431,0,.525],119:[.431,0,.525],120:[.431,0,.525],121:[.431,.228,.525],122:[.431,0,.525],123:[.694,.083,.525],124:[.694,.082,.525],125:[.694,.083,.525],126:[.611,-.466,.525],127:[.612,-.519,.525],160:[0,0,.525],305:[.431,0,.525],567:[.431,.228,.525],697:[.623,-.334,.525],768:[.611,-.485,0],769:[.611,-.485,0],770:[.611,-.46,0],771:[.611,-.466,0],772:[.577,-.5,0],774:[.611,-.504,0],776:[.612,-.519,0],778:[.619,-.499,0],780:[.577,-.449,0],913:[.623,0,.525],914:[.611,0,.525],915:[.611,0,.525],916:[.623,0,.525],917:[.611,0,.525],918:[.611,0,.525],919:[.611,0,.525],920:[.621,.01,.525],921:[.611,0,.525],922:[.611,0,.525],923:[.623,0,.525],924:[.611,0,.525],925:[.611,0,.525],926:[.611,0,.525],927:[.621,.01,.525],928:[.611,0,.525],929:[.611,0,.525],930:[.621,.01,.525],931:[.611,0,.525],932:[.611,0,.525],933:[.622,0,.525],934:[.611,0,.525],935:[.611,0,.525],936:[.611,0,.525],937:[.622,0,.525],978:[.622,0,.525],988:[.611,0,.525],8215:[-.025,.095,.525],8242:[.623,-.334,.525],8243:[.623,0,1.05],8244:[.623,0,1.575],8260:[.694,.083,.525],8279:[.623,0,2.1],8710:[.623,0,.525]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(193);e.normal=n.AddCSS(i.normal,{32:{c:" 
"},33:{c:"!"},35:{c:"#"},36:{c:"$"},37:{c:"%"},38:{c:"&"},40:{c:"("},41:{c:")"},42:{c:"*"},43:{c:"+"},44:{c:","},45:{c:"-"},46:{c:"."},47:{c:"/"},48:{c:"0"},49:{c:"1"},50:{c:"2"},51:{c:"3"},52:{c:"4"},53:{c:"5"},54:{c:"6"},55:{c:"7"},56:{c:"8"},57:{c:"9"},58:{c:":"},59:{c:";"},60:{c:"<"},61:{c:"="},62:{c:">"},63:{c:"?"},64:{c:"@"},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},91:{c:"["},93:{c:"]"},94:{c:"^"},95:{c:"_"},96:{c:"`"},97:{c:"a"},98:{c:"b"},99:{c:"c"},100:{c:"d"},101:{c:"e"},102:{c:"f"},103:{c:"g"},104:{c:"h"},105:{c:"i"},106:{c:"j"},107:{c:"k"},108:{c:"l"},109:{c:"m"},110:{c:"n"},111:{c:"o"},112:{c:"p"},113:{c:"q"},114:{c:"r"},115:{c:"s"},116:{c:"t"},117:{c:"u"},118:{c:"v"},119:{c:"w"},120:{c:"x"},121:{c:"y"},122:{c:"z"},123:{c:"{"},124:{c:"|"},125:{c:"}"},126:{c:"~"},163:{f:"MI"},183:{c:"\\22C5"},697:{c:"\\2032"},913:{c:"A"},914:{c:"B"},917:{c:"E"},918:{c:"Z"},919:{c:"H"},921:{c:"I"},922:{c:"K"},924:{c:"M"},925:{c:"N"},927:{c:"O"},929:{c:"P"},930:{c:"\\398"},932:{c:"T"},935:{c:"X"},978:{c:"\\3A5"},988:{c:"F"},1014:{c:"\\220D"},8192:{c:""},8193:{c:""},8194:{c:""},8195:{c:""},8196:{c:""},8197:{c:""},8198:{c:""},8201:{c:""},8202:{c:""},8203:{c:""},8204:{c:""},8213:{c:"\\2014"},8214:{c:"\\2225"},8215:{c:"_"},8226:{c:"\\2219"},8243:{c:"\\2032\\2032"},8244:{c:"\\2032\\2032\\2032"},8246:{c:"\\2035\\2035"},8247:{c:"\\2035\\2035\\2035"},8254:{c:"\\2C9"},8260:{c:"/"},8279:{c:"\\2032\\2032\\2032\\2032"},8289:{c:""},8290:{c:""},8291:{c:""},8292:{c:""},8407:{c:"\\2192",f:"V"},8450:{c:"C",f:"A"},8459:{c:"H",f:"SC"},8460:{c:"H",f:"FR"},8461:{c:"H",f:"A"},8462:{c:"h",f:"I"},8463:{f:""},8464:{c:"J",f:"SC"},8465:{c:"I",f:"FR"},8466:{c:"L",f:"SC"},8469:{c:"N",f:"A"},8473:{c:"P",f:"A"},8474:{c:"Q",f:"A"},8475:{c:"R",f:"SC"},8476:{c:"R",f:"FR"},8477:{c:"R",f:"A"},8484:{c:"Z",f:"A"},8486:{c:"\\3A9",f:""},8488:{c:"Z",f:"FR"},8492:{c:"B",f:"SC"},8493:{c:"C",f:"FR"},8496:{c:"E",f:"SC"},8497:{c:"F",f:"SC"},8499:{c:"M",f:"SC"},8708:{c:"\\2203\\338"},8710:{c:"\\394"},8716:{c:"\\220B\\338"},8726:{f:""},8772:{c:"\\2243\\338"},8775:{c:"\\2246"},8777:{c:"\\2248\\338"},8802:{c:"\\2261\\338"},8813:{c:"\\224D\\338"},8820:{c:"\\2272\\338"},8821:{c:"\\2273\\338"},8824:{c:"\\2276\\338"},8825:{c:"\\2277\\338"},8836:{c:"\\2282\\338"},8837:{c:"\\2283\\338"},8930:{c:"\\2291\\338"},8931:{c:"\\2292\\338"},8965:{c:"\\22BC"},8966:{c:"\\2A5E"},8988:{c:"\\250C"},8989:{c:"\\2510"},8990:{c:"\\2514"},8991:{c:"\\2518"},9001:{c:"\\27E8"},9002:{c:"\\27E9"},9642:{c:"\\25A0"},9652:{c:"\\25B2"},9653:{c:"\\25B3"},9656:{c:"\\25B6"},9662:{c:"\\25BC"},9663:{c:"\\25BD"},9666:{c:"\\25C0"},9723:{c:"\\25A1"},9724:{c:"\\25A0"},10072:{c:"\\2223"},10744:{c:"/",f:"I"},10764:{c:"\\222C\\222C"},10799:{c:"\\D7"},12296:{c:"\\27E8"},12297:{c:"\\27E9"},119808:{c:"A",f:"B"},119809:{c:"B",f:"B"},119810:{c:"C",f:"B"},119811:{c:"D",f:"B"},119812:{c:"E",f:"B"},119813:{c:"F",f:"B"},119814:{c:"G",f:"B"},119815:{c:"H",f:"B"},119816:{c:"I",f:"B"},119817:{c:"J",f:"B"},119818:{c:"K",f:"B"},119819:{c:"L",f:"B"},119820:{c:"M",f:"B"},119821:{c:"N",f:"B"},119822:{c:"O",f:"B"},119823:{c:"P",f:"B"},119824:{c:"Q",f:"B"},119825:{c:"R",f:"B"},119826:{c:"S",f:"B"},119827:{c:"T",f:"B"},119828:{c:"U",f:"B"},119829:{c:"V",f:"B"},119830:{c:"W",f:"B"},119831:{c:"X",f:"B"},119832:{c:"Y",f:"B"},119833:{c:"Z",f:"B"},119834:{c:"a",f:"B"},119835:
{c:"b",f:"B"},119836:{c:"c",f:"B"},119837:{c:"d",f:"B"},119838:{c:"e",f:"B"},119839:{c:"f",f:"B"},119840:{c:"g",f:"B"},119841:{c:"h",f:"B"},119842:{c:"i",f:"B"},119843:{c:"j",f:"B"},119844:{c:"k",f:"B"},119845:{c:"l",f:"B"},119846:{c:"m",f:"B"},119847:{c:"n",f:"B"},119848:{c:"o",f:"B"},119849:{c:"p",f:"B"},119850:{c:"q",f:"B"},119851:{c:"r",f:"B"},119852:{c:"s",f:"B"},119853:{c:"t",f:"B"},119854:{c:"u",f:"B"},119855:{c:"v",f:"B"},119856:{c:"w",f:"B"},119857:{c:"x",f:"B"},119858:{c:"y",f:"B"},119859:{c:"z",f:"B"},119860:{c:"A",f:"I"},119861:{c:"B",f:"I"},119862:{c:"C",f:"I"},119863:{c:"D",f:"I"},119864:{c:"E",f:"I"},119865:{c:"F",f:"I"},119866:{c:"G",f:"I"},119867:{c:"H",f:"I"},119868:{c:"I",f:"I"},119869:{c:"J",f:"I"},119870:{c:"K",f:"I"},119871:{c:"L",f:"I"},119872:{c:"M",f:"I"},119873:{c:"N",f:"I"},119874:{c:"O",f:"I"},119875:{c:"P",f:"I"},119876:{c:"Q",f:"I"},119877:{c:"R",f:"I"},119878:{c:"S",f:"I"},119879:{c:"T",f:"I"},119880:{c:"U",f:"I"},119881:{c:"V",f:"I"},119882:{c:"W",f:"I"},119883:{c:"X",f:"I"},119884:{c:"Y",f:"I"},119885:{c:"Z",f:"I"},119886:{c:"a",f:"I"},119887:{c:"b",f:"I"},119888:{c:"c",f:"I"},119889:{c:"d",f:"I"},119890:{c:"e",f:"I"},119891:{c:"f",f:"I"},119892:{c:"g",f:"I"},119893:{c:"h",f:"I"},119894:{c:"i",f:"I"},119895:{c:"j",f:"I"},119896:{c:"k",f:"I"},119897:{c:"l",f:"I"},119898:{c:"m",f:"I"},119899:{c:"n",f:"I"},119900:{c:"o",f:"I"},119901:{c:"p",f:"I"},119902:{c:"q",f:"I"},119903:{c:"r",f:"I"},119904:{c:"s",f:"I"},119905:{c:"t",f:"I"},119906:{c:"u",f:"I"},119907:{c:"v",f:"I"},119908:{c:"w",f:"I"},119909:{c:"x",f:"I"},119910:{c:"y",f:"I"},119911:{c:"z",f:"I"},119912:{c:"A",f:"BI"},119913:{c:"B",f:"BI"},119914:{c:"C",f:"BI"},119915:{c:"D",f:"BI"},119916:{c:"E",f:"BI"},119917:{c:"F",f:"BI"},119918:{c:"G",f:"BI"},119919:{c:"H",f:"BI"},119920:{c:"I",f:"BI"},119921:{c:"J",f:"BI"},119922:{c:"K",f:"BI"},119923:{c:"L",f:"BI"},119924:{c:"M",f:"BI"},119925:{c:"N",f:"BI"},119926:{c:"O",f:"BI"},119927:{c:"P",f:"BI"},119928:{c:"Q",f:"BI"},119929:{c:"R",f:"BI"},119930:{c:"S",f:"BI"},119931:{c:"T",f:"BI"},119932:{c:"U",f:"BI"},119933:{c:"V",f:"BI"},119934:{c:"W",f:"BI"},119935:{c:"X",f:"BI"},119936:{c:"Y",f:"BI"},119937:{c:"Z",f:"BI"},119938:{c:"a",f:"BI"},119939:{c:"b",f:"BI"},119940:{c:"c",f:"BI"},119941:{c:"d",f:"BI"},119942:{c:"e",f:"BI"},119943:{c:"f",f:"BI"},119944:{c:"g",f:"BI"},119945:{c:"h",f:"BI"},119946:{c:"i",f:"BI"},119947:{c:"j",f:"BI"},119948:{c:"k",f:"BI"},119949:{c:"l",f:"BI"},119950:{c:"m",f:"BI"},119951:{c:"n",f:"BI"},119952:{c:"o",f:"BI"},119953:{c:"p",f:"BI"},119954:{c:"q",f:"BI"},119955:{c:"r",f:"BI"},119956:{c:"s",f:"BI"},119957:{c:"t",f:"BI"},119958:{c:"u",f:"BI"},119959:{c:"v",f:"BI"},119960:{c:"w",f:"BI"},119961:{c:"x",f:"BI"},119962:{c:"y",f:"BI"},119963:{c:"z",f:"BI"},119964:{c:"A",f:"SC"},119965:{c:"B",f:"SC"},119966:{c:"C",f:"SC"},119967:{c:"D",f:"SC"},119968:{c:"E",f:"SC"},119969:{c:"F",f:"SC"},119970:{c:"G",f:"SC"},119971:{c:"H",f:"SC"},119972:{c:"I",f:"SC"},119973:{c:"J",f:"SC"},119974:{c:"K",f:"SC"},119975:{c:"L",f:"SC"},119976:{c:"M",f:"SC"},119977:{c:"N",f:"SC"},119978:{c:"O",f:"SC"},119979:{c:"P",f:"SC"},119980:{c:"Q",f:"SC"},119981:{c:"R",f:"SC"},119982:{c:"S",f:"SC"},119983:{c:"T",f:"SC"},119984:{c:"U",f:"SC"},119985:{c:"V",f:"SC"},119986:{c:"W",f:"SC"},119987:{c:"X",f:"SC"},119988:{c:"Y",f:"SC"},119989:{c:"Z",f:"SC"},119990:{c:"a",f:"I"},119991:{c:"b",f:"I"},119992:{c:"c",f:"I"},119993:{c:"d",f:"I"},119994:{c:"e",f:"I"},119995:{c:"f",f:"I"},119996:{c:"g",f:"I"},119997:{c:"h",f:"I"},119998:{c:"i",f:"I"},119999:{c:"j",f:"I"},12e4:{c:"k",f:"I"},
120001:{c:"l",f:"I"},120002:{c:"m",f:"I"},120003:{c:"n",f:"I"},120004:{c:"o",f:"I"},120005:{c:"p",f:"I"},120006:{c:"q",f:"I"},120007:{c:"r",f:"I"},120008:{c:"s",f:"I"},120009:{c:"t",f:"I"},120010:{c:"u",f:"I"},120011:{c:"v",f:"I"},120012:{c:"w",f:"I"},120013:{c:"x",f:"I"},120014:{c:"y",f:"I"},120015:{c:"z",f:"I"},120016:{c:"A",f:"SC"},120017:{c:"B",f:"SC"},120018:{c:"C",f:"SC"},120019:{c:"D",f:"SC"},120020:{c:"E",f:"SC"},120021:{c:"F",f:"SC"},120022:{c:"G",f:"SC"},120023:{c:"H",f:"SC"},120024:{c:"I",f:"SC"},120025:{c:"J",f:"SC"},120026:{c:"K",f:"SC"},120027:{c:"L",f:"SC"},120028:{c:"M",f:"SC"},120029:{c:"N",f:"SC"},120030:{c:"O",f:"SC"},120031:{c:"P",f:"SC"},120032:{c:"Q",f:"SC"},120033:{c:"R",f:"SC"},120034:{c:"S",f:"SC"},120035:{c:"T",f:"SC"},120036:{c:"U",f:"SC"},120037:{c:"V",f:"SC"},120038:{c:"W",f:"SC"},120039:{c:"X",f:"SC"},120040:{c:"Y",f:"SC"},120041:{c:"Z",f:"SC"},120042:{c:"a",f:"BI"},120043:{c:"b",f:"BI"},120044:{c:"c",f:"BI"},120045:{c:"d",f:"BI"},120046:{c:"e",f:"BI"},120047:{c:"f",f:"BI"},120048:{c:"g",f:"BI"},120049:{c:"h",f:"BI"},120050:{c:"i",f:"BI"},120051:{c:"j",f:"BI"},120052:{c:"k",f:"BI"},120053:{c:"l",f:"BI"},120054:{c:"m",f:"BI"},120055:{c:"n",f:"BI"},120056:{c:"o",f:"BI"},120057:{c:"p",f:"BI"},120058:{c:"q",f:"BI"},120059:{c:"r",f:"BI"},120060:{c:"s",f:"BI"},120061:{c:"t",f:"BI"},120062:{c:"u",f:"BI"},120063:{c:"v",f:"BI"},120064:{c:"w",f:"BI"},120065:{c:"x",f:"BI"},120066:{c:"y",f:"BI"},120067:{c:"z",f:"BI"},120068:{c:"A",f:"FR"},120069:{c:"B",f:"FR"},120070:{c:"C",f:"FR"},120071:{c:"D",f:"FR"},120072:{c:"E",f:"FR"},120073:{c:"F",f:"FR"},120074:{c:"G",f:"FR"},120075:{c:"H",f:"FR"},120076:{c:"I",f:"FR"},120077:{c:"J",f:"FR"},120078:{c:"K",f:"FR"},120079:{c:"L",f:"FR"},120080:{c:"M",f:"FR"},120081:{c:"N",f:"FR"},120082:{c:"O",f:"FR"},120083:{c:"P",f:"FR"},120084:{c:"Q",f:"FR"},120085:{c:"R",f:"FR"},120086:{c:"S",f:"FR"},120087:{c:"T",f:"FR"},120088:{c:"U",f:"FR"},120089:{c:"V",f:"FR"},120090:{c:"W",f:"FR"},120091:{c:"X",f:"FR"},120092:{c:"Y",f:"FR"},120093:{c:"Z",f:"FR"},120094:{c:"a",f:"FR"},120095:{c:"b",f:"FR"},120096:{c:"c",f:"FR"},120097:{c:"d",f:"FR"},120098:{c:"e",f:"FR"},120099:{c:"f",f:"FR"},120100:{c:"g",f:"FR"},120101:{c:"h",f:"FR"},120102:{c:"i",f:"FR"},120103:{c:"j",f:"FR"},120104:{c:"k",f:"FR"},120105:{c:"l",f:"FR"},120106:{c:"m",f:"FR"},120107:{c:"n",f:"FR"},120108:{c:"o",f:"FR"},120109:{c:"p",f:"FR"},120110:{c:"q",f:"FR"},120111:{c:"r",f:"FR"},120112:{c:"s",f:"FR"},120113:{c:"t",f:"FR"},120114:{c:"u",f:"FR"},120115:{c:"v",f:"FR"},120116:{c:"w",f:"FR"},120117:{c:"x",f:"FR"},120118:{c:"y",f:"FR"},120119:{c:"z",f:"FR"},120120:{c:"A",f:"A"},120121:{c:"B",f:"A"},120122:{c:"C",f:"A"},120123:{c:"D",f:"A"},120124:{c:"E",f:"A"},120125:{c:"F",f:"A"},120126:{c:"G",f:"A"},120127:{c:"H",f:"A"},120128:{c:"I",f:"A"},120129:{c:"J",f:"A"},120130:{c:"K",f:"A"},120131:{c:"L",f:"A"},120132:{c:"M",f:"A"},120133:{c:"N",f:"A"},120134:{c:"O",f:"A"},120135:{c:"P",f:"A"},120136:{c:"Q",f:"A"},120137:{c:"R",f:"A"},120138:{c:"S",f:"A"},120139:{c:"T",f:"A"},120140:{c:"U",f:"A"},120141:{c:"V",f:"A"},120142:{c:"W",f:"A"},120143:{c:"X",f:"A"},120144:{c:"Y",f:"A"},120145:{c:"Z",f:"A"},120146:{c:"a",f:"B"},120147:{c:"b",f:"B"},120148:{c:"c",f:"B"},120149:{c:"d",f:"B"},120150:{c:"e",f:"B"},120151:{c:"f",f:"B"},120152:{c:"g",f:"B"},120153:{c:"h",f:"B"},120154:{c:"i",f:"B"},120155:{c:"j",f:"B"},120156:{c:"k",f:"A"},120157:{c:"l",f:"B"},120158:{c:"m",f:"B"},120159:{c:"n",f:"B"},120160:{c:"o",f:"B"},120161:{c:"p",f:"B"},120162:{c:"q",f:"B"},120163:{c:"r",f:"B"},120164:{c:"s",f:"B"},120165:
{c:"t",f:"B"},120166:{c:"u",f:"B"},120167:{c:"v",f:"B"},120168:{c:"w",f:"B"},120169:{c:"x",f:"B"},120170:{c:"y",f:"B"},120171:{c:"z",f:"B"},120172:{c:"A",f:"FR-B"},120173:{c:"B",f:"FR-B"},120174:{c:"C",f:"FR-B"},120175:{c:"D",f:"FR-B"},120176:{c:"E",f:"FR-B"},120177:{c:"F",f:"FR-B"},120178:{c:"G",f:"FR-B"},120179:{c:"H",f:"FR-B"},120180:{c:"I",f:"FR-B"},120181:{c:"J",f:"FR-B"},120182:{c:"K",f:"FR-B"},120183:{c:"L",f:"FR-B"},120184:{c:"M",f:"FR-B"},120185:{c:"N",f:"FR-B"},120186:{c:"O",f:"FR-B"},120187:{c:"P",f:"FR-B"},120188:{c:"Q",f:"FR-B"},120189:{c:"R",f:"FR-B"},120190:{c:"S",f:"FR-B"},120191:{c:"T",f:"FR-B"},120192:{c:"U",f:"FR-B"},120193:{c:"V",f:"FR-B"},120194:{c:"W",f:"FR-B"},120195:{c:"X",f:"FR-B"},120196:{c:"Y",f:"FR-B"},120197:{c:"Z",f:"FR-B"},120198:{c:"a",f:"FR-B"},120199:{c:"b",f:"FR-B"},120200:{c:"c",f:"FR-B"},120201:{c:"d",f:"FR-B"},120202:{c:"e",f:"FR-B"},120203:{c:"f",f:"FR-B"},120204:{c:"g",f:"FR-B"},120205:{c:"h",f:"FR-B"},120206:{c:"i",f:"FR-B"},120207:{c:"j",f:"FR-B"},120208:{c:"k",f:"FR-B"},120209:{c:"l",f:"FR-B"},120210:{c:"m",f:"FR-B"},120211:{c:"n",f:"FR-B"},120212:{c:"o",f:"FR-B"},120213:{c:"p",f:"FR-B"},120214:{c:"q",f:"FR-B"},120215:{c:"r",f:"FR-B"},120216:{c:"s",f:"FR-B"},120217:{c:"t",f:"FR-B"},120218:{c:"u",f:"FR-B"},120219:{c:"v",f:"FR-B"},120220:{c:"w",f:"FR-B"},120221:{c:"x",f:"FR-B"},120222:{c:"y",f:"FR-B"},120223:{c:"z",f:"FR-B"},120224:{c:"A",f:"SS"},120225:{c:"B",f:"SS"},120226:{c:"C",f:"SS"},120227:{c:"D",f:"SS"},120228:{c:"E",f:"SS"},120229:{c:"F",f:"SS"},120230:{c:"G",f:"SS"},120231:{c:"H",f:"SS"},120232:{c:"I",f:"SS"},120233:{c:"J",f:"SS"},120234:{c:"K",f:"SS"},120235:{c:"L",f:"SS"},120236:{c:"M",f:"SS"},120237:{c:"N",f:"SS"},120238:{c:"O",f:"SS"},120239:{c:"P",f:"SS"},120240:{c:"Q",f:"SS"},120241:{c:"R",f:"SS"},120242:{c:"S",f:"SS"},120243:{c:"T",f:"SS"},120244:{c:"U",f:"SS"},120245:{c:"V",f:"SS"},120246:{c:"W",f:"SS"},120247:{c:"X",f:"SS"},120248:{c:"Y",f:"SS"},120249:{c:"Z",f:"SS"},120250:{c:"a",f:"SS"},120251:{c:"b",f:"SS"},120252:{c:"c",f:"SS"},120253:{c:"d",f:"SS"},120254:{c:"e",f:"SS"},120255:{c:"f",f:"SS"},120256:{c:"g",f:"SS"},120257:{c:"h",f:"SS"},120258:{c:"i",f:"SS"},120259:{c:"j",f:"SS"},120260:{c:"k",f:"SS"},120261:{c:"l",f:"SS"},120262:{c:"m",f:"SS"},120263:{c:"n",f:"SS"},120264:{c:"o",f:"SS"},120265:{c:"p",f:"SS"},120266:{c:"q",f:"SS"},120267:{c:"r",f:"SS"},120268:{c:"s",f:"SS"},120269:{c:"t",f:"SS"},120270:{c:"u",f:"SS"},120271:{c:"v",f:"SS"},120272:{c:"w",f:"SS"},120273:{c:"x",f:"SS"},120274:{c:"y",f:"SS"},120275:{c:"z",f:"SS"},120276:{c:"A",f:"SS-B"},120277:{c:"B",f:"SS-B"},120278:{c:"C",f:"SS-B"},120279:{c:"D",f:"SS-B"},120280:{c:"E",f:"SS-B"},120281:{c:"F",f:"SS-B"},120282:{c:"G",f:"SS-B"},120283:{c:"H",f:"SS-B"},120284:{c:"I",f:"SS-B"},120285:{c:"J",f:"SS-B"},120286:{c:"K",f:"SS-B"},120287:{c:"L",f:"SS-B"},120288:{c:"M",f:"SS-B"},120289:{c:"N",f:"SS-B"},120290:{c:"O",f:"SS-B"},120291:{c:"P",f:"SS-B"},120292:{c:"Q",f:"SS-B"},120293:{c:"R",f:"SS-B"},120294:{c:"S",f:"SS-B"},120295:{c:"T",f:"SS-B"},120296:{c:"U",f:"SS-B"},120297:{c:"V",f:"SS-B"},120298:{c:"W",f:"SS-B"},120299:{c:"X",f:"SS-B"},120300:{c:"Y",f:"SS-B"},120301:{c:"Z",f:"SS-B"},120302:{c:"a",f:"SS-B"},120303:{c:"b",f:"SS-B"},120304:{c:"c",f:"SS-B"},120305:{c:"d",f:"SS-B"},120306:{c:"e",f:"SS-B"},120307:{c:"f",f:"SS-B"},120308:{c:"g",f:"SS-B"},120309:{c:"h",f:"SS-B"},120310:{c:"i",f:"SS-B"},120311:{c:"j",f:"SS-B"},120312:{c:"k",f:"SS-B"},120313:{c:"l",f:"SS-B"},120314:{c:"m",f:"SS-B"},120315:{c:"n",f:"SS-B"},120316:{c:"o",f:"SS-B"},120317:{c:"p",f:"SS-B"},120318:{c:"q",f
:"SS-B"},120319:{c:"r",f:"SS-B"},120320:{c:"s",f:"SS-B"},120321:{c:"t",f:"SS-B"},120322:{c:"u",f:"SS-B"},120323:{c:"v",f:"SS-B"},120324:{c:"w",f:"SS-B"},120325:{c:"x",f:"SS-B"},120326:{c:"y",f:"SS-B"},120327:{c:"z",f:"SS-B"},120328:{c:"A",f:"SS-I"},120329:{c:"B",f:"SS-I"},120330:{c:"C",f:"SS-I"},120331:{c:"D",f:"SS-I"},120332:{c:"E",f:"SS-I"},120333:{c:"F",f:"SS-I"},120334:{c:"G",f:"SS-I"},120335:{c:"H",f:"SS-I"},120336:{c:"I",f:"SS-I"},120337:{c:"J",f:"SS-I"},120338:{c:"K",f:"SS-I"},120339:{c:"L",f:"SS-I"},120340:{c:"M",f:"SS-I"},120341:{c:"N",f:"SS-I"},120342:{c:"O",f:"SS-I"},120343:{c:"P",f:"SS-I"},120344:{c:"Q",f:"SS-I"},120345:{c:"R",f:"SS-I"},120346:{c:"S",f:"SS-I"},120347:{c:"T",f:"SS-I"},120348:{c:"U",f:"SS-I"},120349:{c:"V",f:"SS-I"},120350:{c:"W",f:"SS-I"},120351:{c:"X",f:"SS-I"},120352:{c:"Y",f:"SS-I"},120353:{c:"Z",f:"SS-I"},120354:{c:"a",f:"SS-I"},120355:{c:"b",f:"SS-I"},120356:{c:"c",f:"SS-I"},120357:{c:"d",f:"SS-I"},120358:{c:"e",f:"SS-I"},120359:{c:"f",f:"SS-I"},120360:{c:"g",f:"SS-I"},120361:{c:"h",f:"SS-I"},120362:{c:"i",f:"SS-I"},120363:{c:"j",f:"SS-I"},120364:{c:"k",f:"SS-I"},120365:{c:"l",f:"SS-I"},120366:{c:"m",f:"SS-I"},120367:{c:"n",f:"SS-I"},120368:{c:"o",f:"SS-I"},120369:{c:"p",f:"SS-I"},120370:{c:"q",f:"SS-I"},120371:{c:"r",f:"SS-I"},120372:{c:"s",f:"SS-I"},120373:{c:"t",f:"SS-I"},120374:{c:"u",f:"SS-I"},120375:{c:"v",f:"SS-I"},120376:{c:"w",f:"SS-I"},120377:{c:"x",f:"SS-I"},120378:{c:"y",f:"SS-I"},120379:{c:"z",f:"SS-I"},120380:{c:"A",f:"SS-I"},120381:{c:"B",f:"SS-I"},120382:{c:"C",f:"SS-I"},120383:{c:"D",f:"SS-I"},120384:{c:"E",f:"SS-I"},120385:{c:"F",f:"SS-I"},120386:{c:"G",f:"SS-I"},120387:{c:"H",f:"SS-I"},120388:{c:"I",f:"SS-I"},120389:{c:"J",f:"SS-I"},120390:{c:"K",f:"SS-I"},120391:{c:"L",f:"SS-I"},120392:{c:"M",f:"SS-I"},120393:{c:"N",f:"SS-I"},120394:{c:"O",f:"SS-I"},120395:{c:"P",f:"SS-I"},120396:{c:"Q",f:"SS-I"},120397:{c:"R",f:"SS-I"},120398:{c:"S",f:"SS-I"},120399:{c:"T",f:"SS-I"},120400:{c:"U",f:"SS-I"},120401:{c:"V",f:"SS-I"},120402:{c:"W",f:"SS-I"},120403:{c:"X",f:"SS-I"},120404:{c:"Y",f:"SS-I"},120405:{c:"Z",f:"SS-I"},120406:{c:"a",f:"SS-I"},120407:{c:"b",f:"SS-I"},120408:{c:"c",f:"SS-I"},120409:{c:"d",f:"SS-I"},120410:{c:"e",f:"SS-I"},120411:{c:"f",f:"SS-I"},120412:{c:"g",f:"SS-I"},120413:{c:"h",f:"SS-I"},120414:{c:"i",f:"SS-I"},120415:{c:"j",f:"SS-I"},120416:{c:"k",f:"SS-I"},120417:{c:"l",f:"SS-I"},120418:{c:"m",f:"SS-I"},120419:{c:"n",f:"SS-I"},120420:{c:"o",f:"SS-I"},120421:{c:"p",f:"SS-I"},120422:{c:"q",f:"SS-I"},120423:{c:"r",f:"SS-I"},120424:{c:"s",f:"SS-I"},120425:{c:"t",f:"SS-I"},120426:{c:"u",f:"SS-I"},120427:{c:"v",f:"SS-I"},120428:{c:"w",f:"SS-I"},120429:{c:"x",f:"SS-I"},120430:{c:"y",f:"SS-I"},120431:{c:"z",f:"SS-I"},120432:{c:"A",f:"T"},120433:{c:"B",f:"T"},120434:{c:"C",f:"T"},120435:{c:"D",f:"T"},120436:{c:"E",f:"T"},120437:{c:"F",f:"T"},120438:{c:"G",f:"T"},120439:{c:"H",f:"T"},120440:{c:"I",f:"T"},120441:{c:"J",f:"T"},120442:{c:"K",f:"T"},120443:{c:"L",f:"T"},120444:{c:"M",f:"T"},120445:{c:"N",f:"T"},120446:{c:"O",f:"T"},120447:{c:"P",f:"T"},120448:{c:"Q",f:"T"},120449:{c:"R",f:"T"},120450:{c:"S",f:"T"},120451:{c:"T",f:"T"},120452:{c:"U",f:"T"},120453:{c:"V",f:"T"},120454:{c:"W",f:"T"},120455:{c:"X",f:"T"},120456:{c:"Y",f:"T"},120457:{c:"Z",f:"T"},120458:{c:"a",f:"T"},120459:{c:"b",f:"T"},120460:{c:"c",f:"T"},120461:{c:"d",f:"T"},120462:{c:"e",f:"T"},120463:{c:"f",f:"T"},120464:{c:"g",f:"T"},120465:{c:"h",f:"T"},120466:{c:"i",f:"T"},120467:{c:"j",f:"T"},120468:{c:"k",f:"T"},120469:{c:"l",f:"T"},120470:{c:"m",f:"T"},120471:{c:"n",f
:"T"},120472:{c:"o",f:"T"},120473:{c:"p",f:"T"},120474:{c:"q",f:"T"},120475:{c:"r",f:"T"},120476:{c:"s",f:"T"},120477:{c:"t",f:"T"},120478:{c:"u",f:"T"},120479:{c:"v",f:"T"},120480:{c:"w",f:"T"},120481:{c:"x",f:"T"},120482:{c:"y",f:"T"},120483:{c:"z",f:"T"},120484:{c:"\\131",f:"MI"},120485:{c:"\\237",f:"MI"},120488:{c:"A",f:"B"},120489:{c:"B",f:"B"},120490:{c:"\\393",f:"B"},120491:{c:"\\394",f:"B"},120492:{c:"E",f:"B"},120493:{c:"Z",f:"B"},120494:{c:"H",f:"B"},120495:{c:"\\398",f:"B"},120496:{c:"I",f:"B"},120497:{c:"K",f:"B"},120498:{c:"\\39B",f:"B"},120499:{c:"M",f:"B"},120500:{c:"N",f:"B"},120501:{c:"\\39E",f:"B"},120502:{c:"O",f:"B"},120503:{c:"\\3A0",f:"B"},120504:{c:"P",f:"B"},120505:{c:"\\398",f:"B"},120506:{c:"\\3A3",f:"B"},120507:{c:"T",f:"B"},120508:{c:"\\3A5",f:"B"},120509:{c:"\\3A6",f:"B"},120510:{c:"X",f:"B"},120511:{c:"\\3A8",f:"B"},120512:{c:"\\3A9",f:"B"},120513:{c:"\\2207",f:"B"},120514:{c:"\\3B1",f:"BI"},120515:{c:"\\3B2",f:"BI"},120516:{c:"\\3B3",f:"BI"},120517:{c:"\\3B4",f:"BI"},120518:{c:"\\3B5",f:"BI"},120519:{c:"\\3B6",f:"BI"},120520:{c:"\\3B7",f:"BI"},120521:{c:"\\3B8",f:"BI"},120522:{c:"\\3B9",f:"BI"},120523:{c:"\\3BA",f:"BI"},120524:{c:"\\3BB",f:"BI"},120525:{c:"\\3BC",f:"BI"},120526:{c:"\\3BD",f:"BI"},120527:{c:"\\3BE",f:"BI"},120528:{c:"\\3BF",f:"BI"},120529:{c:"\\3C0",f:"BI"},120530:{c:"\\3C1",f:"BI"},120531:{c:"\\3C2",f:"BI"},120532:{c:"\\3C3",f:"BI"},120533:{c:"\\3C4",f:"BI"},120534:{c:"\\3C5",f:"BI"},120535:{c:"\\3C6",f:"BI"},120536:{c:"\\3C7",f:"BI"},120537:{c:"\\3C8",f:"BI"},120538:{c:"\\3C9",f:"BI"},120539:{c:"\\2202",f:"BI"},120540:{c:"\\3F5",f:"BI"},120541:{c:"\\3D1",f:"BI"},120542:{c:"\\E009",f:"A"},120543:{c:"\\3D5",f:"BI"},120544:{c:"\\3F1",f:"BI"},120545:{c:"\\3D6",f:"BI"},120546:{c:"A",f:"I"},120547:{c:"B",f:"I"},120548:{c:"\\393",f:"I"},120549:{c:"\\394",f:"I"},120550:{c:"E",f:"I"},120551:{c:"Z",f:"I"},120552:{c:"H",f:"I"},120553:{c:"\\398",f:"I"},120554:{c:"I",f:"I"},120555:{c:"K",f:"I"},120556:{c:"\\39B",f:"I"},120557:{c:"M",f:"I"},120558:{c:"N",f:"I"},120559:{c:"\\39E",f:"I"},120560:{c:"O",f:"I"},120561:{c:"\\3A0",f:"I"},120562:{c:"P",f:"I"},120563:{c:"\\398",f:"I"},120564:{c:"\\3A3",f:"I"},120565:{c:"T",f:"I"},120566:{c:"\\3A5",f:"I"},120567:{c:"\\3A6",f:"I"},120568:{c:"X",f:"I"},120569:{c:"\\3A8",f:"I"},120570:{c:"\\3A9",f:"I"},120571:{c:"\\2207",f:""},120572:{c:"\\3B1",f:"I"},120573:{c:"\\3B2",f:"I"},120574:{c:"\\3B3",f:"I"},120575:{c:"\\3B4",f:"I"},120576:{c:"\\3B5",f:"I"},120577:{c:"\\3B6",f:"I"},120578:{c:"\\3B7",f:"I"},120579:{c:"\\3B8",f:"I"},120580:{c:"\\3B9",f:"I"},120581:{c:"\\3BA",f:"I"},120582:{c:"\\3BB",f:"I"},120583:{c:"\\3BC",f:"I"},120584:{c:"\\3BD",f:"I"},120585:{c:"\\3BE",f:"I"},120586:{c:"\\3BF",f:"I"},120587:{c:"\\3C0",f:"I"},120588:{c:"\\3C1",f:"I"},120589:{c:"\\3C2",f:"I"},120590:{c:"\\3C3",f:"I"},120591:{c:"\\3C4",f:"I"},120592:{c:"\\3C5",f:"I"},120593:{c:"\\3C6",f:"I"},120594:{c:"\\3C7",f:"I"},120595:{c:"\\3C8",f:"I"},120596:{c:"\\3C9",f:"I"},120597:{c:"\\2202",f:""},120598:{c:"\\3F5",f:"I"},120599:{c:"\\3D1",f:"I"},120600:{c:"\\E009",f:"A"},120601:{c:"\\3D5",f:"I"},120602:{c:"\\3F1",f:"I"},120603:{c:"\\3D6",f:"I"},120604:{c:"A",f:"BI"},120605:{c:"B",f:"BI"},120606:{c:"\\393",f:"BI"},120607:{c:"\\394",f:"BI"},120608:{c:"E",f:"BI"},120609:{c:"Z",f:"BI"},120610:{c:"H",f:"BI"},120611:{c:"\\398",f:"BI"},120612:{c:"I",f:"BI"},120613:{c:"K",f:"BI"},120614:{c:"\\39B",f:"BI"},120615:{c:"M",f:"BI"},120616:{c:"N",f:"BI"},120617:{c:"\\39E",f:"BI"},120618:{c:"O",f:"BI"},120619:{c:"\\3A0",f:"BI"},120620:{c:"P",f:"BI"},120621:{c:"\\398
",f:"BI"},120622:{c:"\\3A3",f:"BI"},120623:{c:"T",f:"BI"},120624:{c:"\\3A5",f:"BI"},120625:{c:"\\3A6",f:"BI"},120626:{c:"X",f:"BI"},120627:{c:"\\3A8",f:"BI"},120628:{c:"\\3A9",f:"BI"},120629:{c:"\\2207",f:""},120630:{c:"\\3B1",f:"BI"},120631:{c:"\\3B2",f:"BI"},120632:{c:"\\3B3",f:"BI"},120633:{c:"\\3B4",f:"BI"},120634:{c:"\\3B5",f:"BI"},120635:{c:"\\3B6",f:"BI"},120636:{c:"\\3B7",f:"BI"},120637:{c:"\\3B8",f:"BI"},120638:{c:"\\3B9",f:"BI"},120639:{c:"\\3BA",f:"BI"},120640:{c:"\\3BB",f:"BI"},120641:{c:"\\3BC",f:"BI"},120642:{c:"\\3BD",f:"BI"},120643:{c:"\\3BE",f:"BI"},120644:{c:"\\3BF",f:"BI"},120645:{c:"\\3C0",f:"BI"},120646:{c:"\\3C1",f:"BI"},120647:{c:"\\3C2",f:"BI"},120648:{c:"\\3C3",f:"BI"},120649:{c:"\\3C4",f:"BI"},120650:{c:"\\3C5",f:"BI"},120651:{c:"\\3C6",f:"BI"},120652:{c:"\\3C7",f:"BI"},120653:{c:"\\3C8",f:"BI"},120654:{c:"\\3C9",f:"BI"},120655:{c:"\\2202",f:""},120656:{c:"\\3F5",f:"BI"},120657:{c:"\\3D1",f:"BI"},120658:{c:"\\E009",f:"A"},120659:{c:"\\3D5",f:"BI"},120660:{c:"\\3F1",f:"BI"},120661:{c:"\\3D6",f:"BI"},120662:{c:"A",f:"SS-B"},120663:{c:"B",f:"SS-B"},120664:{c:"\\393",f:"SS-B"},120665:{c:"\\394",f:"SS-B"},120666:{c:"E",f:"SS-B"},120667:{c:"Z",f:"SS-B"},120668:{c:"H",f:"SS-B"},120669:{c:"\\398",f:"SS-B"},120670:{c:"I",f:"SS-B"},120671:{c:"K",f:"SS-B"},120672:{c:"\\39B",f:"SS-B"},120673:{c:"M",f:"SS-B"},120674:{c:"N",f:"SS-B"},120675:{c:"\\39E",f:"SS-B"},120676:{c:"O",f:"SS-B"},120677:{c:"\\3A0",f:"SS-B"},120678:{c:"P",f:"SS-B"},120679:{c:"\\398",f:"SS-B"},120680:{c:"\\3A3",f:"SS-B"},120681:{c:"T",f:"SS-B"},120682:{c:"\\3A5",f:"SS-B"},120683:{c:"\\3A6",f:"SS-B"},120684:{c:"X",f:"SS-B"},120685:{c:"\\3A8",f:"SS-B"},120686:{c:"\\3A9",f:"SS-B"},120687:{c:"\\2207",f:""},120688:{c:"\\3B1",f:"BI"},120689:{c:"\\3B2",f:"BI"},120690:{c:"\\3B3",f:"BI"},120691:{c:"\\3B4",f:"BI"},120692:{c:"\\3B5",f:"BI"},120693:{c:"\\3B6",f:"BI"},120694:{c:"\\3B7",f:"BI"},120695:{c:"\\3B8",f:"BI"},120696:{c:"\\3B9",f:"BI"},120697:{c:"\\3BA",f:"BI"},120698:{c:"\\3BB",f:"BI"},120699:{c:"\\3BC",f:"BI"},120700:{c:"\\3BD",f:"BI"},120701:{c:"\\3BE",f:"BI"},120702:{c:"\\3BF",f:"BI"},120703:{c:"\\3C0",f:"BI"},120704:{c:"\\3C1",f:"BI"},120705:{c:"\\3C2",f:"BI"},120706:{c:"\\3C3",f:"BI"},120707:{c:"\\3C4",f:"BI"},120708:{c:"\\3C5",f:"BI"},120709:{c:"\\3C6",f:"BI"},120710:{c:"\\3C7",f:"BI"},120711:{c:"\\3C8",f:"BI"},120712:{c:"\\3C9",f:"BI"},120713:{c:"\\2202",f:""},120714:{c:"\\3F5",f:"BI"},120715:{c:"\\3D1",f:"BI"},120716:{c:"\\E009",f:"A"},120717:{c:"\\3D5",f:"BI"},120718:{c:"\\3F1",f:"BI"},120719:{c:"\\3D6",f:"BI"},120720:{c:"A",f:"SS-I"},120721:{c:"B",f:"SS-I"},120722:{c:"\\393",f:"SS-I"},120723:{c:"\\394",f:"SS-I"},120724:{c:"E",f:"SS-I"},120725:{c:"Z",f:"SS-I"},120726:{c:"H",f:"SS-I"},120727:{c:"\\398",f:"SS-I"},120728:{c:"I",f:"SS-I"},120729:{c:"K",f:"SS-I"},120730:{c:"\\39B",f:"SS-I"},120731:{c:"M",f:"SS-I"},120732:{c:"N",f:"SS-I"},120733:{c:"\\39E",f:"SS-I"},120734:{c:"O",f:"SS-I"},120735:{c:"\\3A0",f:"SS-I"},120736:{c:"P",f:"SS-I"},120737:{c:"\\398",f:"SS-I"},120738:{c:"\\3A3",f:"SS-I"},120739:{c:"T",f:"SS-I"},120740:{c:"\\3A5",f:"SS-I"},120741:{c:"\\3A6",f:"SS-I"},120742:{c:"X",f:"SS-I"},120743:{c:"\\3A8",f:"SS-I"},120744:{c:"\\3A9",f:"SS-I"},120745:{c:"\\2207",f:""},120746:{c:"\\3B1",f:"BI"},120747:{c:"\\3B2",f:"BI"},120748:{c:"\\3B3",f:"BI"},120749:{c:"\\3B4",f:"BI"},120750:{c:"\\3B5",f:"BI"},120751:{c:"\\3B6",f:"BI"},120752:{c:"\\3B7",f:"BI"},120753:{c:"\\3B8",f:"BI"},120754:{c:"\\3B9",f:"BI"},120755:{c:"\\3BA",f:"BI"},120756:{c:"\\3BB",f:"BI"},120757:{c:"\\3BC",f:"BI"},120758:{c:"\\3BD",f:"BI"},
120759:{c:"\\3BE",f:"BI"},120760:{c:"\\3BF",f:"BI"},120761:{c:"\\3C0",f:"BI"},120762:{c:"\\3C1",f:"BI"},120763:{c:"\\3C2",f:"BI"},120764:{c:"\\3C3",f:"BI"},120765:{c:"\\3C4",f:"BI"},120766:{c:"\\3C5",f:"BI"},120767:{c:"\\3C6",f:"BI"},120768:{c:"\\3C7",f:"BI"},120769:{c:"\\3C8",f:"BI"},120770:{c:"\\3C9",f:"BI"},120771:{c:"\\2202",f:""},120772:{c:"\\3F5",f:"BI"},120773:{c:"\\3D1",f:"BI"},120774:{c:"\\E009",f:"A"},120775:{c:"\\3D5",f:"BI"},120776:{c:"\\3F1",f:"BI"},120777:{c:"\\3D6",f:"BI"},120778:{c:"F",f:"I"},120779:{c:"\\3DD",f:"A"},120782:{c:"0",f:"B"},120783:{c:"1",f:"B"},120784:{c:"2",f:"B"},120785:{c:"3",f:"B"},120786:{c:"4",f:"B"},120787:{c:"5",f:"B"},120788:{c:"6",f:"B"},120789:{c:"7",f:"B"},120790:{c:"8",f:"B"},120791:{c:"9",f:"B"},120792:{c:"0",f:"B"},120793:{c:"1",f:"B"},120794:{c:"2",f:"B"},120795:{c:"3",f:"B"},120796:{c:"4",f:"B"},120797:{c:"5",f:"B"},120798:{c:"6",f:"B"},120799:{c:"7",f:"B"},120800:{c:"8",f:"B"},120801:{c:"9",f:"B"},120802:{c:"0",f:"SS"},120803:{c:"1",f:"SS"},120804:{c:"2",f:"SS"},120805:{c:"3",f:"SS"},120806:{c:"4",f:"SS"},120807:{c:"5",f:"SS"},120808:{c:"6",f:"SS"},120809:{c:"7",f:"SS"},120810:{c:"8",f:"SS"},120811:{c:"9",f:"SS"},120812:{c:"0",f:"SS-B"},120813:{c:"1",f:"SS-B"},120814:{c:"2",f:"SS-B"},120815:{c:"3",f:"SS-B"},120816:{c:"4",f:"SS-B"},120817:{c:"5",f:"SS-B"},120818:{c:"6",f:"SS-B"},120819:{c:"7",f:"SS-B"},120820:{c:"8",f:"SS-B"},120821:{c:"9",f:"SS-B"},120822:{c:"0",f:"T"},120823:{c:"1",f:"T"},120824:{c:"2",f:"T"},120825:{c:"3",f:"T"},120826:{c:"4",f:"T"},120827:{c:"5",f:"T"},120828:{c:"6",f:"T"},120829:{c:"7",f:"T"},120830:{c:"8",f:"T"},120831:{c:"9",f:"T"}})},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0}),e.normal={32:[0,0,.25],33:[.716,0,.278],34:[.694,-.379,.5],35:[.694,.194,.833],36:[.75,.056,.5],37:[.75,.056,.833],38:[.716,.022,.778],39:[.694,-.379,.278],40:[.75,.25,.389],41:[.75,.25,.389],42:[.75,-.32,.5],43:[.583,.082,.778],44:[.121,.194,.278],45:[.252,-.179,.333],46:[.12,0,.278],47:[.75,.25,.5],48:[.666,.022,.5],49:[.666,0,.5],50:[.666,0,.5],51:[.665,.022,.5],52:[.677,0,.5],53:[.666,.022,.5],54:[.666,.022,.5],55:[.676,.022,.5],56:[.666,.022,.5],57:[.666,.022,.5],58:[.43,0,.278],59:[.43,.194,.278],60:[.54,.04,.778],61:[.583,.082,.778],62:[.54,.04,.778],63:[.705,0,.472],64:[.705,.011,.778],65:[.716,0,.75],66:[.683,0,.708],67:[.705,.021,.722],68:[.683,0,.764],69:[.68,0,.681],70:[.68,0,.653],71:[.705,.022,.785],72:[.683,0,.75],73:[.683,0,.361],74:[.683,.022,.514],75:[.683,0,.778],76:[.683,0,.625],77:[.683,0,.917],78:[.683,0,.75],79:[.705,.022,.778],80:[.683,0,.681],81:[.705,.193,.778],82:[.683,.022,.736],83:[.705,.022,.556],84:[.677,0,.722],85:[.683,.022,.75],86:[.683,.022,.75],87:[.683,.022,1.028],88:[.683,0,.75],89:[.683,0,.75],90:[.683,0,.611],91:[.75,.25,.278],92:[.75,.25,.5],93:[.75,.25,.278],94:[.694,-.531,.5],95:[-.025,.062,.5],96:[.699,-.505,.5],97:[.448,.011,.5],98:[.694,.011,.556],99:[.448,.011,.444],100:[.694,.011,.556],101:[.448,.011,.444],102:[.705,0,.306,{ic:.066}],103:[.453,.206,.5],104:[.694,0,.556],105:[.669,0,.278],106:[.669,.205,.306],107:[.694,0,.528],108:[.694,0,.278],109:[.442,0,.833],110:[.442,0,.556],111:[.448,.01,.5],112:[.442,.194,.556],113:[.442,.194,.528],114:[.442,0,.392],115:[.448,.011,.394],116:[.615,.01,.389],117:[.442,.011,.556],118:[.431,.011,.528],119:[.431,.011,.722],120:[.431,0,.528],121:[.431,.204,.528],122:[.431,0,.444],123:[.75,.25,.5],124:[.75,.249,.278],125:[.75,.25,.5],126:[.318,-.215,.5],160:[0,0,.25],163:[.714,.011,.769],165:[.683,0,.75],168:[.669,-.554,.5],172:[.356,
-.089,.667],174:[.709,.175,.947],175:[.59,-.544,.5],176:[.715,-.542,.5],177:[.666,0,.778],180:[.699,-.505,.5],183:[.31,-.19,.278],215:[.491,-.009,.778],240:[.749,.021,.556],247:[.537,.036,.778],295:[.695,.013,.54],305:[.442,0,.278,{sk:.0278}],567:[.442,.205,.306,{sk:.0833}],697:[.56,-.043,.275],710:[.694,-.531,.5],711:[.644,-.513,.5],713:[.59,-.544,.5],714:[.699,-.505,.5],715:[.699,-.505,.5],728:[.694,-.515,.5],729:[.669,-.549,.5],730:[.715,-.542,.5],732:[.668,-.565,.5],768:[.699,-.505,0],769:[.699,-.505,0],770:[.694,-.531,0],771:[.668,-.565,0],772:[.59,-.544,0],774:[.694,-.515,0],775:[.669,-.549,0],776:[.669,-.554,0],778:[.715,-.542,0],779:[.701,-.51,0],780:[.644,-.513,0],824:[.716,.215,0],913:[.716,0,.75],914:[.683,0,.708],915:[.68,0,.625],916:[.716,0,.833],917:[.68,0,.681],918:[.683,0,.611],919:[.683,0,.75],920:[.705,.022,.778],921:[.683,0,.361],922:[.683,0,.778],923:[.716,0,.694],924:[.683,0,.917],925:[.683,0,.75],926:[.677,0,.667],927:[.705,.022,.778],928:[.68,0,.75],929:[.683,0,.681],930:[.705,.022,.778],931:[.683,0,.722],932:[.677,0,.722],933:[.705,0,.778],934:[.683,0,.722],935:[.683,0,.75],936:[.683,0,.778],937:[.704,0,.722],978:[.705,0,.778],988:[.68,0,.653],989:[.605,.085,.778],1008:[.434,.006,.667,{ic:.067}],1014:[.44,0,.429],8192:[0,0,.5],8193:[0,0,1],8194:[0,0,.5],8195:[0,0,1],8196:[0,0,.333],8197:[0,0,.25],8198:[0,0,.167],8201:[0,0,.167],8202:[0,0,.1],8203:[0,0,0],8204:[0,0,0],8211:[.285,-.248,.5],8212:[.285,-.248,1],8213:[.285,-.248,1],8214:[.75,.25,.5],8215:[-.025,.062,.5],8216:[.694,-.379,.278],8217:[.694,-.379,.278],8220:[.694,-.379,.5],8221:[.694,-.379,.5],8224:[.705,.216,.444],8225:[.705,.205,.444],8226:[.444,-.055,.5],8230:[.12,0,1.172],8242:[.56,-.043,.275],8243:[.56,0,.55],8244:[.56,0,.825],8245:[.56,-.043,.275],8246:[.56,0,.55],8247:[.56,0,.825],8254:[.59,-.544,.5],8260:[.75,.25,.5],8279:[.56,0,1.1],8289:[0,0,0],8290:[0,0,0],8291:[0,0,0],8292:[0,0,0],8407:[.714,-.516,.5],8450:[.702,.019,.722],8459:[.717,.036,.969,{ic:.272,sk:.333}],8460:[.666,.133,.72],8461:[.683,0,.778],8462:[.694,.011,.576,{sk:-.0278}],8463:[.695,.013,.54],8464:[.717,.314,1.052,{ic:.081,sk:.417}],8465:[.686,.026,.554],8466:[.717,.017,.874,{ic:.161,sk:.306}],8467:[.705,.02,.417,{sk:.111}],8469:[.683,.02,.722],8472:[.453,.216,.636,{sk:.111}],8473:[.683,0,.611],8474:[.701,.181,.778],8475:[.717,.017,.85,{sk:.194}],8476:[.686,.026,.828],8477:[.683,0,.722],8484:[.683,0,.667],8486:[.704,0,.722],8487:[.684,.022,.722],8488:[.729,.139,.602],8492:[.708,.028,.908,{sk:.194}],8493:[.685,.024,.613],8496:[.707,.008,.562,{ic:.156,sk:.139}],8497:[.735,.036,.895,{ic:.095,sk:.222}],8498:[.695,0,.556],8499:[.721,.05,1.08,{ic:.136,sk:.444}],8501:[.694,0,.611],8502:[.763,.021,.667],8503:[.764,.043,.444],8504:[.764,.043,.667],8513:[.705,.023,.639],8592:[.511,.011,1],8593:[.694,.193,.5],8594:[.511,.011,1],8595:[.694,.194,.5],8596:[.511,.011,1],8597:[.772,.272,.5],8598:[.72,.195,1],8599:[.72,.195,1],8600:[.695,.22,1],8601:[.695,.22,1],8602:[.437,-.06,1],8603:[.437,-.06,1],8606:[.417,-.083,1],8608:[.417,-.083,1],8610:[.417,-.083,1.111],8611:[.417,-.083,1.111],8614:[.511,.011,1],8617:[.511,.011,1.126],8618:[.511,.011,1.126],8619:[.575,.041,1],8620:[.575,.041,1],8621:[.417,-.083,1.389],8622:[.437,-.06,1],8624:[.722,0,.5],8625:[.722,0,.5],8630:[.461,0,1],8631:[.46,0,1],8634:[.65,.083,.778],8635:[.65,.083,.778],8636:[.511,-.23,1],8637:[.27,.011,1],8638:[.694,.194,.417],8639:[.694,.194,.417],8640:[.511,-.23,1],8641:[.27,.011,1],8642:[.694,.194,.417],8643:[.694,.194,.417],8644:[.667,0,1],8646:[.667,0,1],8647:[.583,.083,1],8648:[.6
94,.193,.833],8649:[.583,.083,1],8650:[.694,.194,.833],8651:[.514,.014,1],8652:[.671,.011,1],8653:[.534,.035,1],8654:[.534,.037,1],8655:[.534,.035,1],8656:[.525,.024,1],8657:[.694,.194,.611],8658:[.525,.024,1],8659:[.694,.194,.611],8660:[.526,.025,1],8661:[.772,.272,.611],8666:[.611,.111,1],8667:[.611,.111,1],8669:[.417,-.083,1],8672:[.437,-.064,1.334],8674:[.437,-.064,1.334],8704:[.694,.022,.556],8705:[.846,.021,.5],8706:[.715,.022,.531,{sk:.0833}],8707:[.694,0,.556],8708:[.716,.215,.556],8709:[.772,.078,.5],8710:[.716,0,.833],8711:[.683,.033,.833],8712:[.54,.04,.667],8713:[.716,.215,.667],8715:[.54,.04,.667],8716:[.716,.215,.667],8717:[.44,0,.429],8719:[.75,.25,.944],8720:[.75,.25,.944],8721:[.75,.25,1.056],8722:[.583,.082,.778],8723:[.5,.166,.778],8724:[.766,.093,.778],8725:[.75,.25,.5],8726:[.43,.023,.778],8727:[.465,-.035,.5],8728:[.444,-.055,.5],8729:[.444,-.055,.5],8730:[.8,.2,.833],8733:[.442,.011,.778],8734:[.442,.011,1],8736:[.694,0,.722],8737:[.714,.02,.722],8738:[.551,.051,.722],8739:[.75,.249,.278],8740:[.75,.252,.278],8741:[.75,.25,.5],8742:[.75,.25,.5],8743:[.598,.022,.667],8744:[.598,.022,.667],8745:[.598,.022,.667],8746:[.598,.022,.667],8747:[.716,.216,.417,{ic:.055}],8748:[.805,.306,.819,{ic:.138}],8749:[.805,.306,1.166,{ic:.138}],8750:[.805,.306,.472,{ic:.138}],8756:[.471,.082,.667],8757:[.471,.082,.667],8764:[.367,-.133,.778],8765:[.367,-.133,.778],8768:[.583,.083,.278],8769:[.467,-.032,.778],8770:[.463,-.034,.778],8771:[.464,-.036,.778],8772:[.716,.215,.778],8773:[.589,-.022,.778],8774:[.652,.155,.778],8775:[.652,.155,.778],8776:[.483,-.055,.778],8777:[.716,.215,.778],8778:[.579,.039,.778],8781:[.484,-.016,.778],8782:[.492,-.008,.778],8783:[.492,-.133,.778],8784:[.67,-.133,.778],8785:[.609,.108,.778],8786:[.601,.101,.778],8787:[.601,.102,.778],8790:[.367,-.133,.778],8791:[.721,-.133,.778],8796:[.859,-.133,.778],8800:[.716,.215,.778],8801:[.464,-.036,.778],8802:[.716,.215,.778],8804:[.636,.138,.778],8805:[.636,.138,.778],8806:[.753,.175,.778],8807:[.753,.175,.778],8808:[.752,.286,.778],8809:[.752,.286,.778],8810:[.568,.067,1],8811:[.567,.067,1],8812:[.75,.25,.5],8813:[.716,.215,.778],8814:[.708,.209,.778],8815:[.708,.209,.778],8816:[.801,.303,.778],8817:[.801,.303,.778],8818:[.732,.228,.778],8819:[.732,.228,.778],8820:[.732,.228,.778],8821:[.732,.228,.778],8822:[.681,.253,.778],8823:[.681,.253,.778],8824:[.716,.253,.778],8825:[.716,.253,.778],8826:[.539,.041,.778],8827:[.539,.041,.778],8828:[.58,.153,.778],8829:[.58,.154,.778],8830:[.732,.228,.778],8831:[.732,.228,.778],8832:[.705,.208,.778],8833:[.705,.208,.778],8834:[.54,.04,.778],8835:[.54,.04,.778],8836:[.716,.215,.778],8837:[.716,.215,.778],8838:[.636,.138,.778],8839:[.636,.138,.778],8840:[.801,.303,.778],8841:[.801,.303,.778],8842:[.635,.241,.778],8843:[.635,.241,.778],8846:[.598,.022,.667],8847:[.539,.041,.778],8848:[.539,.041,.778],8849:[.636,.138,.778],8850:[.636,.138,.778],8851:[.598,0,.667],8852:[.598,0,.667],8853:[.583,.083,.778],8854:[.583,.083,.778],8855:[.583,.083,.778],8856:[.583,.083,.778],8857:[.583,.083,.778],8858:[.582,.082,.778],8859:[.582,.082,.778],8861:[.582,.082,.778],8862:[.689,0,.778],8863:[.689,0,.778],8864:[.689,0,.778],8865:[.689,0,.778],8866:[.694,0,.611],8867:[.694,0,.611],8868:[.668,0,.778],8869:[.668,0,.778],8872:[.75,.249,.867],8873:[.694,0,.722],8874:[.694,0,.889],8876:[.695,0,.611],8877:[.695,0,.611],8878:[.695,0,.722],8879:[.695,0,.722],8882:[.539,.041,.778],8883:[.539,.041,.778],8884:[.636,.138,.778],8885:[.636,.138,.778],8888:[.408,-.092,1.111],8890:[.431,.212,.556],8891:[.716,0,.6
11],8892:[.716,0,.611],8896:[.75,.249,.833],8897:[.75,.249,.833],8898:[.75,.249,.833],8899:[.75,.249,.833],8900:[.488,-.012,.5],8901:[.31,-.19,.278],8902:[.486,-.016,.5],8903:[.545,.044,.778],8904:[.505,.005,.9],8905:[.492,-.008,.778],8906:[.492,-.008,.778],8907:[.694,.022,.778],8908:[.694,.022,.778],8909:[.464,-.036,.778],8910:[.578,.021,.76],8911:[.578,.022,.76],8912:[.54,.04,.778],8913:[.54,.04,.778],8914:[.598,.022,.667],8915:[.598,.022,.667],8916:[.736,.022,.667],8918:[.541,.041,.778],8919:[.541,.041,.778],8920:[.568,.067,1.333],8921:[.568,.067,1.333],8922:[.886,.386,.778],8923:[.886,.386,.778],8926:[.734,0,.778],8927:[.734,0,.778],8928:[.801,.303,.778],8929:[.801,.303,.778],8930:[.716,.215,.778],8931:[.716,.215,.778],8934:[.73,.359,.778],8935:[.73,.359,.778],8936:[.73,.359,.778],8937:[.73,.359,.778],8938:[.706,.208,.778],8939:[.706,.208,.778],8940:[.802,.303,.778],8941:[.801,.303,.778],8942:[1.3,.03,.278],8943:[.31,-.19,1.172],8945:[1.52,-.1,1.282],8965:[.716,0,.611],8966:[.813,.097,.611],8968:[.75,.25,.444],8969:[.75,.25,.444],8970:[.75,.25,.444],8971:[.75,.25,.444],8988:[.694,-.306,.5],8989:[.694,-.306,.5],8990:[.366,.022,.5],8991:[.366,.022,.5],8994:[.388,-.122,1],8995:[.378,-.134,1],9001:[.75,.25,.389],9002:[.75,.25,.389],9136:[.744,.244,.412],9137:[.744,.244,.412],9168:[.602,0,.667],9416:[.709,.175,.902],9484:[.694,-.306,.5],9488:[.694,-.306,.5],9492:[.366,.022,.5],9496:[.366,.022,.5],9585:[.694,.195,.889],9586:[.694,.195,.889],9632:[.689,0,.778],9633:[.689,0,.778],9642:[.689,0,.778],9650:[.575,.02,.722],9651:[.716,0,.889],9652:[.575,.02,.722],9653:[.716,0,.889],9654:[.539,.041,.778],9656:[.539,.041,.778],9657:[.505,.005,.5],9660:[.576,.019,.722],9661:[.5,.215,.889],9662:[.576,.019,.722],9663:[.5,.215,.889],9664:[.539,.041,.778],9666:[.539,.041,.778],9667:[.505,.005,.5],9674:[.716,.132,.667],9711:[.715,.215,1],9723:[.689,0,.778],9724:[.689,0,.778],9733:[.694,.111,.944],9824:[.727,.13,.778],9825:[.716,.033,.778],9826:[.727,.162,.778],9827:[.726,.13,.778],9837:[.75,.022,.389],9838:[.734,.223,.389],9839:[.723,.223,.389],10003:[.706,.034,.833],10016:[.716,.022,.833],10072:[.75,.249,.278],10216:[.75,.25,.389],10217:[.75,.25,.389],10222:[.744,.244,.412],10223:[.744,.244,.412],10229:[.511,.011,1.609],10230:[.511,.011,1.638],10231:[.511,.011,1.859],10232:[.525,.024,1.609],10233:[.525,.024,1.638],10234:[.525,.024,1.858],10236:[.511,.011,1.638],10731:[.716,.132,.667],10744:[.716,.215,.778],10752:[.75,.25,1.111],10753:[.75,.25,1.111],10754:[.75,.25,1.111],10756:[.75,.249,.833],10758:[.75,.249,.833],10764:[.805,.306,1.638,{ic:.138}],10799:[.491,-.009,.778],10815:[.683,0,.75],10846:[.813,.097,.611],10877:[.636,.138,.778],10878:[.636,.138,.778],10885:[.762,.29,.778],10886:[.762,.29,.778],10887:[.635,.241,.778],10888:[.635,.241,.778],10889:[.761,.387,.778],10890:[.761,.387,.778],10891:[1.003,.463,.778],10892:[1.003,.463,.778],10901:[.636,.138,.778],10902:[.636,.138,.778],10927:[.636,.138,.778],10928:[.636,.138,.778],10933:[.752,.286,.778],10934:[.752,.286,.778],10935:[.761,.294,.778],10936:[.761,.294,.778],10937:[.761,.337,.778],10938:[.761,.337,.778],10949:[.753,.215,.778],10950:[.753,.215,.778],10955:[.783,.385,.778],10956:[.783,.385,.778],12296:[.75,.25,.389],12297:[.75,.25,.389],57350:[.43,.023,.222],57351:[.431,.024,.389],57352:[.605,.085,.778],57353:[.434,.006,.667,{ic:.067}],57356:[.752,.284,.778],57357:[.752,.284,.778],57358:[.919,.421,.778],57359:[.801,.303,.778],57360:[.801,.303,.778],57361:[.919,.421,.778],57366:[.828,.33,.778],57367:[.752,.332,.778],57368:[.828,.33,.778],57369:[.752
,.333,.778],57370:[.634,.255,.778],57371:[.634,.254,.778],119808:[.698,0,.869],119809:[.686,0,.818],119810:[.697,.011,.831],119811:[.686,0,.882],119812:[.68,0,.756],119813:[.68,0,.724],119814:[.697,.01,.904],119815:[.686,0,.9],119816:[.686,0,.436],119817:[.686,.011,.594],119818:[.686,0,.901],119819:[.686,0,.692],119820:[.686,0,1.092],119821:[.686,0,.9],119822:[.696,.01,.864],119823:[.686,0,.786],119824:[.696,.193,.864],119825:[.686,.011,.862],119826:[.697,.011,.639],119827:[.675,0,.8],119828:[.686,.011,.885],119829:[.686,.007,.869],119830:[.686,.007,1.189],119831:[.686,0,.869],119832:[.686,0,.869],119833:[.686,0,.703],119834:[.453,.006,.559],119835:[.694,.006,.639],119836:[.453,.006,.511],119837:[.694,.006,.639],119838:[.452,.006,.527],119839:[.7,0,.351,{ic:.101}],119840:[.455,.201,.575],119841:[.694,0,.639],119842:[.695,0,.319],119843:[.695,.2,.351],119844:[.694,0,.607],119845:[.694,0,.319],119846:[.45,0,.958],119847:[.45,0,.639],119848:[.452,.005,.575],119849:[.45,.194,.639],119850:[.45,.194,.607],119851:[.45,0,.474],119852:[.453,.006,.454],119853:[.635,.005,.447],119854:[.45,.006,.639],119855:[.444,0,.607],119856:[.444,0,.831],119857:[.444,0,.607],119858:[.444,.2,.607],119859:[.444,0,.511],119860:[.716,0,.75,{sk:.139}],119861:[.683,0,.759,{sk:.0833}],119862:[.705,.022,.715,{sk:.0833}],119863:[.683,0,.828,{sk:.0556}],119864:[.68,0,.738,{sk:.0833}],119865:[.68,0,.643,{ic:.106,sk:.0833}],119866:[.705,.022,.786,{sk:.0833}],119867:[.683,0,.831,{ic:.057,sk:.0556}],119868:[.683,0,.44,{ic:.064,sk:.111}],119869:[.683,.022,.555,{ic:.078,sk:.167}],119870:[.683,0,.849,{sk:.0556}],119871:[.683,0,.681,{sk:.0278}],119872:[.683,0,.97,{ic:.081,sk:.0833}],119873:[.683,0,.803,{ic:.085,sk:.0833}],119874:[.704,.022,.763,{sk:.0833}],119875:[.683,0,.642,{ic:.109,sk:.0833}],119876:[.704,.194,.791,{sk:.0833}],119877:[.683,.021,.759,{sk:.0833}],119878:[.705,.022,.613,{sk:.0833}],119879:[.677,0,.584,{ic:.12,sk:.0833}],119880:[.683,.022,.683,{ic:.084,sk:.0278}],119881:[.683,.022,.583,{ic:.186}],119882:[.683,.022,.944,{ic:.104}],119883:[.683,0,.828,{sk:.0833}],119884:[.683,0,.581,{ic:.182}],119885:[.683,0,.683,{sk:.0833}],119886:[.441,.01,.529],119887:[.694,.011,.429],119888:[.442,.011,.433,{sk:.0556}],119889:[.694,.01,.52,{sk:.167}],119890:[.442,.011,.466,{sk:.0556}],119891:[.705,.205,.49,{ic:.06,sk:.167}],119892:[.442,.205,.477,{sk:.0278}],119893:[.694,.011,.576,{sk:-.0278}],119894:[.661,.011,.345],119895:[.661,.204,.412],119896:[.694,.011,.521],119897:[.694,.011,.298,{sk:.0833}],119898:[.442,.011,.878],119899:[.442,.011,.6],119900:[.441,.011,.485,{sk:.0556}],119901:[.442,.194,.503,{sk:.0833}],119902:[.442,.194,.446,{sk:.0833}],119903:[.442,.011,.451,{sk:.0556}],119904:[.442,.01,.469,{sk:.0556}],119905:[.626,.011,.361,{sk:.0833}],119906:[.442,.011,.572,{sk:.0278}],119907:[.443,.011,.485,{sk:.0278}],119908:[.443,.011,.716,{sk:.0833}],119909:[.442,.011,.572,{sk:.0278}],119910:[.442,.205,.49,{sk:.0556}],119911:[.442,.011,.465,{sk:.0556}],119912:[.711,0,.869,{sk:.16}],119913:[.686,0,.866,{sk:.0958}],119914:[.703,.017,.817,{sk:.0958}],119915:[.686,0,.938,{sk:.0639}],119916:[.68,0,.81,{sk:.0958}],119917:[.68,0,.689,{ic:.12,sk:.0958}],119918:[.703,.016,.887,{sk:.0958}],119919:[.686,0,.982,{sk:.0639}],119920:[.686,0,.511,{ic:.062,sk:.128}],119921:[.686,.017,.631,{ic:.063,sk:.192}],119922:[.686,0,.971,{sk:.0639}],119923:[.686,0,.756,{sk:.0319}],119924:[.686,0,1.142,{ic:.077,sk:.0958}],119925:[.686,0,.95,{ic:.077,sk:.0958}],119926:[.703,.017,.837,{sk:.0958}],119927:[.686,0,.723,{ic:.124,sk:.0958}],119928:[.703,.194,.869,{sk
:.0958}],119929:[.686,.017,.872,{sk:.0958}],119930:[.703,.017,.693,{sk:.0958}],119931:[.675,0,.637,{ic:.135,sk:.0958}],119932:[.686,.016,.8,{ic:.077,sk:.0319}],119933:[.686,.016,.678,{ic:.208}],119934:[.686,.017,1.093,{ic:.114}],119935:[.686,0,.947,{sk:.0958}],119936:[.686,0,.675,{ic:.201}],119937:[.686,0,.773,{sk:.0958}],119938:[.452,.008,.633],119939:[.694,.008,.521],119940:[.451,.008,.513,{sk:.0639}],119941:[.694,.008,.61,{sk:.192}],119942:[.452,.008,.554,{sk:.0639}],119943:[.701,.201,.568,{ic:.056,sk:.192}],119944:[.452,.202,.545,{sk:.0319}],119945:[.694,.008,.668,{sk:-.0319}],119946:[.694,.008,.405],119947:[.694,.202,.471],119948:[.694,.008,.604],119949:[.694,.008,.348,{sk:.0958}],119950:[.452,.008,1.032],119951:[.452,.008,.713],119952:[.452,.008,.585,{sk:.0639}],119953:[.452,.194,.601,{sk:.0958}],119954:[.452,.194,.542,{sk:.0958}],119955:[.452,.008,.529,{sk:.0639}],119956:[.451,.008,.531,{sk:.0639}],119957:[.643,.007,.415,{sk:.0958}],119958:[.452,.008,.681,{sk:.0319}],119959:[.453,.008,.567,{sk:.0319}],119960:[.453,.008,.831,{sk:.0958}],119961:[.452,.008,.659,{sk:.0319}],119962:[.452,.202,.59,{sk:.0639}],119963:[.452,.008,.555,{sk:.0639}],119964:[.717,.008,.803,{ic:.213,sk:.389}],119965:[.708,.028,.908,{sk:.194}],119966:[.728,.026,.666,{ic:.153,sk:.278}],119967:[.708,.031,.774,{ic:.081,sk:.111}],119968:[.707,.008,.562,{ic:.156,sk:.139}],119969:[.735,.036,.895,{ic:.095,sk:.222}],119970:[.717,.037,.61,{ic:.128,sk:.25}],119971:[.717,.036,.969,{ic:.272,sk:.333}],119972:[.717,.017,.809,{ic:.137,sk:.333}],119973:[.717,.314,1.052,{ic:.081,sk:.417}],119974:[.717,.037,.914,{ic:.29,sk:.361}],119975:[.717,.017,.874,{ic:.161,sk:.306}],119976:[.721,.05,1.08,{ic:.136,sk:.444}],119977:[.726,.036,.902,{ic:.306,sk:.389}],119978:[.707,.008,.738,{ic:.067,sk:.167}],119979:[.716,.037,1.013,{sk:.222}],119980:[.717,.017,.883,{sk:.278}],119981:[.717,.017,.85,{sk:.194}],119982:[.708,.036,.868,{ic:.148,sk:.333}],119983:[.735,.037,.747,{ic:.249,sk:.222}],119984:[.717,.017,.8,{ic:.16,sk:.25}],119985:[.717,.017,.622,{ic:.228,sk:.222}],119986:[.717,.017,.805,{ic:.221,sk:.25}],119987:[.717,.017,.944,{ic:.187,sk:.278}],119988:[.716,.017,.71,{ic:.249,sk:.194}],119989:[.717,.016,.821,{ic:.211,sk:.306}],119990:[.441,.01,.529],119991:[.694,.011,.429],119992:[.442,.011,.433,{sk:.0556}],119993:[.694,.01,.52,{sk:.167}],119994:[.442,.011,.466,{sk:.0556}],119995:[.705,.205,.49,{ic:.06,sk:.167}],119996:[.442,.205,.477,{sk:.0278}],119997:[.694,.011,.576,{sk:-.0278}],119998:[.661,.011,.345],119999:[.661,.204,.412],12e4:[.694,.011,.521],120001:[.694,.011,.298,{sk:.0833}],120002:[.442,.011,.878],120003:[.442,.011,.6],120004:[.441,.011,.485,{sk:.0556}],120005:[.442,.194,.503,{sk:.0833}],120006:[.442,.194,.446,{sk:.0833}],120007:[.442,.011,.451,{sk:.0556}],120008:[.442,.01,.469,{sk:.0556}],120009:[.626,.011,.361,{sk:.0833}],120010:[.442,.011,.572,{sk:.0278}],120011:[.443,.011,.485,{sk:.0278}],120012:[.443,.011,.716,{sk:.0833}],120013:[.442,.011,.572,{sk:.0278}],120014:[.442,.205,.49,{sk:.0556}],120015:[.442,.011,.465,{sk:.0556}],120016:[.717,.008,.803,{ic:.213,sk:.389}],120017:[.708,.028,.908,{sk:.194}],120018:[.728,.026,.666,{ic:.153,sk:.278}],120019:[.708,.031,.774,{ic:.081,sk:.111}],120020:[.707,.008,.562,{ic:.156,sk:.139}],120021:[.735,.036,.895,{ic:.095,sk:.222}],120022:[.717,.037,.61,{ic:.128,sk:.25}],120023:[.717,.036,.969,{ic:.272,sk:.333}],120024:[.717,.017,.809,{ic:.137,sk:.333}],120025:[.717,.314,1.052,{ic:.081,sk:.417}],120026:[.717,.037,.914,{ic:.29,sk:.361}],120027:[.717,.017,.874,{ic:.161,sk:.306}],120028:[.721,.05,1
.08,{ic:.136,sk:.444}],120029:[.726,.036,.902,{ic:.306,sk:.389}],120030:[.707,.008,.738,{ic:.067,sk:.167}],120031:[.716,.037,1.013,{sk:.222}],120032:[.717,.017,.883,{sk:.278}],120033:[.717,.017,.85,{sk:.194}],120034:[.708,.036,.868,{ic:.148,sk:.333}],120035:[.735,.037,.747,{ic:.249,sk:.222}],120036:[.717,.017,.8,{ic:.16,sk:.25}],120037:[.717,.017,.622,{ic:.228,sk:.222}],120038:[.717,.017,.805,{ic:.221,sk:.25}],120039:[.717,.017,.944,{ic:.187,sk:.278}],120040:[.716,.017,.71,{ic:.249,sk:.194}],120041:[.717,.016,.821,{ic:.211,sk:.306}],120042:[.452,.008,.633],120043:[.694,.008,.521],120044:[.451,.008,.513,{sk:.0639}],120045:[.694,.008,.61,{sk:.192}],120046:[.452,.008,.554,{sk:.0639}],120047:[.701,.201,.568,{ic:.056,sk:.192}],120048:[.452,.202,.545,{sk:.0319}],120049:[.694,.008,.668,{sk:-.0319}],120050:[.694,.008,.405],120051:[.694,.202,.471],120052:[.694,.008,.604],120053:[.694,.008,.348,{sk:.0958}],120054:[.452,.008,1.032],120055:[.452,.008,.713],120056:[.452,.008,.585,{sk:.0639}],120057:[.452,.194,.601,{sk:.0958}],120058:[.452,.194,.542,{sk:.0958}],120059:[.452,.008,.529,{sk:.0639}],120060:[.451,.008,.531,{sk:.0639}],120061:[.643,.007,.415,{sk:.0958}],120062:[.452,.008,.681,{sk:.0319}],120063:[.453,.008,.567,{sk:.0319}],120064:[.453,.008,.831,{sk:.0958}],120065:[.452,.008,.659,{sk:.0319}],120066:[.452,.202,.59,{sk:.0639}],120067:[.452,.008,.555,{sk:.0639}],120068:[.696,.026,.718],120069:[.691,.027,.884],120070:[.685,.024,.613],120071:[.685,.027,.832],120072:[.685,.024,.663],120073:[.686,.153,.611],120074:[.69,.026,.785],120075:[.666,.133,.72],120076:[.686,.026,.554],120077:[.686,.139,.552],120078:[.68,.027,.668],120079:[.686,.026,.666],120080:[.692,.027,1.05],120081:[.686,.025,.832],120082:[.729,.027,.827],120083:[.692,.218,.828],120084:[.729,.069,.827],120085:[.686,.026,.828],120086:[.692,.027,.829],120087:[.701,.027,.669],120088:[.697,.027,.646],120089:[.686,.026,.831],120090:[.686,.027,1.046],120091:[.688,.027,.719],120092:[.686,.218,.833],120093:[.729,.139,.602],120094:[.47,.035,.5],120095:[.685,.031,.513],120096:[.466,.029,.389],120097:[.609,.033,.499],120098:[.467,.03,.401],120099:[.681,.221,.326],120100:[.47,.209,.504],120101:[.688,.205,.521],120102:[.673,.02,.279],120103:[.672,.208,.281],120104:[.689,.025,.389],120105:[.685,.02,.28],120106:[.475,.026,.767],120107:[.475,.022,.527],120108:[.48,.028,.489],120109:[.541,.212,.5],120110:[.479,.219,.489],120111:[.474,.021,.389],120112:[.478,.029,.443],120113:[.64,.02,.333],120114:[.474,.023,.517],120115:[.53,.028,.512],120116:[.532,.028,.774],120117:[.472,.188,.389],120118:[.528,.218,.499],120119:[.471,.214,.391],120120:[.701,0,.722],120121:[.683,0,.667],120122:[.702,.019,.722],120123:[.683,0,.722],120124:[.683,0,.667],120125:[.683,0,.611],120126:[.702,.019,.778],120127:[.683,0,.778],120128:[.683,0,.389],120129:[.683,.077,.5],120130:[.683,0,.778],120131:[.683,0,.667],120132:[.683,0,.944],120133:[.683,.02,.722],120134:[.701,.019,.778],120135:[.683,0,.611],120136:[.701,.181,.778],120137:[.683,0,.722],120138:[.702,.012,.556],120139:[.683,0,.667],120140:[.683,.019,.722],120141:[.683,.02,.722],120142:[.683,.019,1],120143:[.683,0,.722],120144:[.683,0,.722],120145:[.683,0,.667],120146:[.453,.006,.559],120147:[.694,.006,.639],120148:[.453,.006,.511],120149:[.694,.006,.639],120150:[.452,.006,.527],120151:[.7,0,.351,{ic:.101}],120152:[.455,.201,.575],120153:[.694,0,.639],120154:[.695,0,.319],120155:[.695,.2,.351],120156:[.683,0,.556],120157:[.694,0,.319],120158:[.45,0,.958],120159:[.45,0,.639],120160:[.452,.005,.575],120161:[.45,.194,.639],120162:[.4
5,.194,.607],120163:[.45,0,.474],120164:[.453,.006,.454],120165:[.635,.005,.447],120166:[.45,.006,.639],120167:[.444,0,.607],120168:[.444,0,.831],120169:[.444,0,.607],120170:[.444,.2,.607],120171:[.444,0,.511],120172:[.686,.031,.847],120173:[.684,.031,1.044],120174:[.676,.032,.723],120175:[.683,.029,.982],120176:[.686,.029,.783],120177:[.684,.146,.722],120178:[.687,.029,.927],120179:[.683,.126,.851],120180:[.681,.025,.655],120181:[.68,.141,.652],120182:[.681,.026,.789],120183:[.683,.028,.786],120184:[.683,.032,1.239],120185:[.679,.03,.983],120186:[.726,.03,.976],120187:[.688,.223,.977],120188:[.726,.083,.976],120189:[.688,.028,.978],120190:[.685,.031,.978],120191:[.686,.03,.79],120192:[.688,.039,.851],120193:[.685,.029,.982],120194:[.683,.03,1.235],120195:[.681,.035,.849],120196:[.688,.214,.984],120197:[.677,.148,.711],120198:[.472,.032,.603],120199:[.69,.032,.59],120200:[.473,.026,.464],120201:[.632,.028,.589],120202:[.471,.027,.472],120203:[.687,.222,.388],120204:[.472,.208,.595],120205:[.687,.207,.615],120206:[.686,.025,.331],120207:[.682,.203,.332],120208:[.682,.025,.464],120209:[.681,.024,.337],120210:[.476,.031,.921],120211:[.473,.028,.654],120212:[.482,.034,.609],120213:[.557,.207,.604],120214:[.485,.211,.596],120215:[.472,.026,.46],120216:[.479,.034,.523],120217:[.648,.027,.393],120218:[.472,.032,.589],120219:[.546,.027,.604],120220:[.549,.032,.918],120221:[.471,.188,.459],120222:[.557,.221,.589],120223:[.471,.214,.461],120224:[.694,0,.667],120225:[.694,0,.667],120226:[.705,.011,.639],120227:[.694,0,.722],120228:[.691,0,.597],120229:[.691,0,.569],120230:[.704,.011,.667],120231:[.694,0,.708],120232:[.694,0,.278],120233:[.694,.022,.472],120234:[.694,0,.694],120235:[.694,0,.542],120236:[.694,0,.875],120237:[.694,0,.708],120238:[.715,.022,.736],120239:[.694,0,.639],120240:[.715,.125,.736],120241:[.694,0,.646],120242:[.716,.022,.556],120243:[.688,0,.681],120244:[.694,.022,.688],120245:[.694,0,.667],120246:[.694,0,.944],120247:[.694,0,.667],120248:[.694,0,.667],120249:[.694,0,.611],120250:[.46,.01,.481],120251:[.694,.011,.517],120252:[.46,.01,.444],120253:[.694,.01,.517],120254:[.461,.01,.444],120255:[.705,0,.306],120256:[.455,.206,.5],120257:[.694,0,.517],120258:[.68,0,.239],120259:[.68,.205,.267],120260:[.694,0,.489],120261:[.694,0,.239],120262:[.455,0,.794],120263:[.455,0,.517],120264:[.46,.01,.5],120265:[.455,.194,.517],120266:[.455,.194,.517],120267:[.455,0,.342],120268:[.46,.01,.383],120269:[.571,.01,.361],120270:[.444,.01,.517],120271:[.444,0,.461],120272:[.444,0,.683],120273:[.444,0,.461],120274:[.444,.204,.461],120275:[.444,0,.435],120276:[.694,0,.733],120277:[.694,0,.733],120278:[.704,.011,.703],120279:[.694,0,.794],120280:[.691,0,.642],120281:[.691,0,.611],120282:[.705,.011,.733],120283:[.694,0,.794],120284:[.694,0,.331],120285:[.694,.022,.519],120286:[.694,0,.764],120287:[.694,0,.581],120288:[.694,0,.978],120289:[.694,0,.794],120290:[.716,.022,.794],120291:[.694,0,.703],120292:[.716,.106,.794],120293:[.694,0,.703],120294:[.716,.022,.611],120295:[.688,0,.733],120296:[.694,.022,.764],120297:[.694,0,.733],120298:[.694,0,1.039],120299:[.694,0,.733],120300:[.694,0,.733],120301:[.694,0,.672],120302:[.475,.011,.525],120303:[.694,.01,.561],120304:[.475,.011,.489],120305:[.694,.011,.561],120306:[.474,.01,.511],120307:[.705,0,.336],120308:[.469,.206,.55],120309:[.694,0,.561],120310:[.695,0,.256],120311:[.695,.205,.286],120312:[.694,0,.531],120313:[.694,0,.256],120314:[.469,0,.867],120315:[.468,0,.561],120316:[.474,.011,.55],120317:[.469,.194,.561],120318:[.469,.194,.561],120319:[.469,0,
.372],120320:[.474,.01,.422],120321:[.589,.01,.404],120322:[.458,.011,.561],120323:[.458,0,.5],120324:[.458,0,.744],120325:[.458,0,.5],120326:[.458,.205,.5],120327:[.458,0,.476],120328:[.694,0,.667],120329:[.694,0,.667],120330:[.705,.01,.639,{ic:.08}],120331:[.694,0,.722],120332:[.691,0,.597,{ic:.091}],120333:[.691,0,.569,{ic:.104}],120334:[.705,.011,.667,{ic:.063}],120335:[.694,0,.708,{ic:.06}],120336:[.694,0,.278,{ic:.06}],120337:[.694,.022,.472,{ic:.063}],120338:[.694,0,.694,{ic:.091}],120339:[.694,0,.542],120340:[.694,0,.875,{ic:.054}],120341:[.694,0,.708,{ic:.058}],120342:[.716,.022,.736],120343:[.694,0,.639,{ic:.051}],120344:[.716,.125,.736],120345:[.694,0,.646,{ic:.052}],120346:[.716,.022,.556,{ic:.053}],120347:[.688,0,.681,{ic:.109}],120348:[.694,.022,.688,{ic:.059}],120349:[.694,0,.667,{ic:.132}],120350:[.694,0,.944,{ic:.132}],120351:[.694,0,.667,{ic:.091}],120352:[.694,0,.667,{ic:.143}],120353:[.694,0,.611,{ic:.091}],120354:[.461,.01,.481],120355:[.694,.011,.517],120356:[.46,.011,.444,{ic:.055}],120357:[.694,.01,.517,{ic:.071}],120358:[.46,.011,.444],120359:[.705,0,.306,{ic:.188}],120360:[.455,.206,.5,{ic:.068}],120361:[.694,0,.517],120362:[.68,0,.239,{ic:.076}],120363:[.68,.204,.267,{ic:.069}],120364:[.694,0,.489,{ic:.054}],120365:[.694,0,.239,{ic:.072}],120366:[.455,0,.794],120367:[.454,0,.517],120368:[.461,.011,.5],120369:[.455,.194,.517],120370:[.455,.194,.517],120371:[.455,0,.342,{ic:.082}],120372:[.461,.011,.383,{ic:.053}],120373:[.571,.011,.361],120374:[.444,.01,.517],120375:[.444,0,.461,{ic:.079}],120376:[.444,0,.683,{ic:.079}],120377:[.444,0,.461,{ic:.076}],120378:[.444,.205,.461,{ic:.079}],120379:[.444,0,.435,{ic:.059}],120380:[.694,0,.667],120381:[.694,0,.667],120382:[.705,.01,.639,{ic:.08}],120383:[.694,0,.722],120384:[.691,0,.597,{ic:.091}],120385:[.691,0,.569,{ic:.104}],120386:[.705,.011,.667,{ic:.063}],120387:[.694,0,.708,{ic:.06}],120388:[.694,0,.278,{ic:.06}],120389:[.694,.022,.472,{ic:.063}],120390:[.694,0,.694,{ic:.091}],120391:[.694,0,.542],120392:[.694,0,.875,{ic:.054}],120393:[.694,0,.708,{ic:.058}],120394:[.716,.022,.736],120395:[.694,0,.639,{ic:.051}],120396:[.716,.125,.736],120397:[.694,0,.646,{ic:.052}],120398:[.716,.022,.556,{ic:.053}],120399:[.688,0,.681,{ic:.109}],120400:[.694,.022,.688,{ic:.059}],120401:[.694,0,.667,{ic:.132}],120402:[.694,0,.944,{ic:.132}],120403:[.694,0,.667,{ic:.091}],120404:[.694,0,.667,{ic:.143}],120405:[.694,0,.611,{ic:.091}],120406:[.461,.01,.481],120407:[.694,.011,.517],120408:[.46,.011,.444,{ic:.055}],120409:[.694,.01,.517,{ic:.071}],120410:[.46,.011,.444],120411:[.705,0,.306,{ic:.188}],120412:[.455,.206,.5,{ic:.068}],120413:[.694,0,.517],120414:[.68,0,.239,{ic:.076}],120415:[.68,.204,.267,{ic:.069}],120416:[.694,0,.489,{ic:.054}],120417:[.694,0,.239,{ic:.072}],120418:[.455,0,.794],120419:[.454,0,.517],120420:[.461,.011,.5],120421:[.455,.194,.517],120422:[.455,.194,.517],120423:[.455,0,.342,{ic:.082}],120424:[.461,.011,.383,{ic:.053}],120425:[.571,.011,.361],120426:[.444,.01,.517],120427:[.444,0,.461,{ic:.079}],120428:[.444,0,.683,{ic:.079}],120429:[.444,0,.461,{ic:.076}],120430:[.444,.205,.461,{ic:.079}],120431:[.444,0,.435,{ic:.059}],120432:[.623,0,.525],120433:[.611,0,.525],120434:[.622,.011,.525],120435:[.611,0,.525],120436:[.611,0,.525],120437:[.611,0,.525],120438:[.622,.011,.525],120439:[.611,0,.525],120440:[.611,0,.525],120441:[.611,.011,.525],120442:[.611,0,.525],120443:[.611,0,.525],120444:[.611,0,.525],120445:[.611,0,.525],120446:[.621,.01,.525],120447:[.611,0,.525],120448:[.621,.138,.525],120449:[.611,.011,.525],12
0450:[.622,.011,.525],120451:[.611,0,.525],120452:[.611,.011,.525],120453:[.611,.007,.525],120454:[.611,.007,.525],120455:[.611,0,.525],120456:[.611,0,.525],120457:[.611,0,.525],120458:[.439,.006,.525],120459:[.611,.006,.525],120460:[.44,.006,.525],120461:[.611,.006,.525],120462:[.44,.006,.525],120463:[.617,0,.525],120464:[.442,.229,.525],120465:[.611,0,.525],120466:[.612,0,.525],120467:[.612,.228,.525],120468:[.611,0,.525],120469:[.611,0,.525],120470:[.436,0,.525],120471:[.436,0,.525],120472:[.44,.006,.525],120473:[.437,.221,.525],120474:[.437,.221,.525],120475:[.437,0,.525],120476:[.44,.006,.525],120477:[.554,.006,.525],120478:[.431,.005,.525],120479:[.431,0,.525],120480:[.431,0,.525],120481:[.431,0,.525],120482:[.431,.228,.525],120483:[.431,0,.525],120484:[.441,.01,.307],120485:[.442,.204,.332],120488:[.698,0,.869],120489:[.686,0,.818],120490:[.68,0,.692],120491:[.698,0,.958],120492:[.68,0,.756],120493:[.686,0,.703],120494:[.686,0,.9],120495:[.696,.01,.894],120496:[.686,0,.436],120497:[.686,0,.901],120498:[.698,0,.806],120499:[.686,0,1.092],120500:[.686,0,.9],120501:[.675,0,.767],120502:[.696,.01,.864],120503:[.68,0,.9],120504:[.686,0,.786],120505:[.696,.01,.894],120506:[.686,0,.831],120507:[.675,0,.8],120508:[.697,0,.894],120509:[.686,0,.831],120510:[.686,0,.869],120511:[.686,0,.894],120512:[.696,0,.831],120513:[.686,.024,.958],120514:[.452,.008,.761,{sk:.0319}],120515:[.701,.194,.66,{sk:.0958}],120516:[.451,.211,.59],120517:[.725,.008,.522,{sk:.0639}],120518:[.461,.017,.529,{sk:.0958}],120519:[.711,.202,.508,{sk:.0958}],120520:[.452,.211,.6,{sk:.0639}],120521:[.702,.008,.562,{sk:.0958}],120522:[.452,.008,.412,{sk:.0639}],120523:[.452,.008,.668],120524:[.694,.013,.671],120525:[.452,.211,.708,{sk:.0319}],120526:[.452,0,.577,{sk:.0319}],120527:[.711,.201,.508,{sk:.128}],120528:[.452,.008,.585,{sk:.0639}],120529:[.444,.008,.682],120530:[.451,.211,.612,{sk:.0958}],120531:[.451,.105,.424,{sk:.0958}],120532:[.444,.008,.686],120533:[.444,.013,.521,{ic:.089,sk:.0319}],120534:[.453,.008,.631,{sk:.0319}],120535:[.452,.216,.747,{sk:.0958}],120536:[.452,.201,.718,{sk:.0639}],120537:[.694,.202,.758,{sk:.128}],120538:[.453,.008,.718],120539:[.71,.017,.628,{sk:.0958}],120540:[.444,.007,.483,{sk:.0639}],120541:[.701,.008,.692,{sk:.0958}],120542:[.434,.006,.667,{ic:.067}],120543:[.694,.202,.712,{sk:.0958}],120544:[.451,.194,.612,{sk:.0958}],120545:[.444,.008,.975],120546:[.716,0,.75,{sk:.139}],120547:[.683,0,.759,{sk:.0833}],120548:[.68,0,.615,{ic:.106,sk:.0833}],120549:[.716,0,.833,{sk:.167}],120550:[.68,0,.738,{sk:.0833}],120551:[.683,0,.683,{sk:.0833}],120552:[.683,0,.831,{ic:.057,sk:.0556}],120553:[.704,.022,.763,{sk:.0833}],120554:[.683,0,.44,{ic:.064,sk:.111}],120555:[.683,0,.849,{sk:.0556}],120556:[.716,0,.694,{sk:.167}],120557:[.683,0,.97,{ic:.081,sk:.0833}],120558:[.683,0,.803,{ic:.085,sk:.0833}],120559:[.677,0,.742,{sk:.0833}],120560:[.704,.022,.763,{sk:.0833}],120561:[.68,0,.831,{ic:.056,sk:.0556}],120562:[.683,0,.642,{ic:.109,sk:.0833}],120563:[.704,.022,.763,{sk:.0833}],120564:[.683,0,.78,{sk:.0833}],120565:[.677,0,.584,{ic:.12,sk:.0833}],120566:[.705,0,.583,{ic:.117,sk:.0556}],120567:[.683,0,.667,{sk:.0833}],120568:[.683,0,.828,{sk:.0833}],120569:[.683,0,.612,{ic:.08,sk:.0556}],120570:[.704,0,.772,{sk:.0833}],120571:[.683,.033,.833],120572:[.442,.011,.64,{sk:.0278}],120573:[.705,.194,.566,{sk:.0833}],120574:[.441,.216,.518],120575:[.717,.01,.444,{sk:.0556}],120576:[.452,.022,.466,{sk:.0833}],120577:[.704,.204,.438,{sk:.0833}],120578:[.442,.216,.497,{sk:.0556}],120579:[.705,.01,.469,{sk:.08
33}],120580:[.442,.01,.354,{sk:.0556}],120581:[.442,.011,.576],120582:[.694,.012,.583],120583:[.442,.216,.603,{sk:.0278}],120584:[.442,0,.494,{sk:.0278}],120585:[.704,.205,.438,{sk:.111}],120586:[.441,.011,.485,{sk:.0556}],120587:[.431,.011,.57],120588:[.442,.216,.517,{sk:.0833}],120589:[.442,.107,.363,{sk:.0833}],120590:[.431,.011,.571],120591:[.431,.013,.437,{ic:.08,sk:.0278}],120592:[.443,.01,.54,{sk:.0278}],120593:[.442,.218,.654,{sk:.0833}],120594:[.442,.204,.626,{sk:.0556}],120595:[.694,.205,.651,{sk:.111}],120596:[.443,.011,.622],120597:[.715,.022,.531,{sk:.0833}],120598:[.431,.011,.406,{sk:.0556}],120599:[.705,.011,.591,{sk:.0833}],120600:[.434,.006,.667,{ic:.067}],120601:[.694,.205,.596,{sk:.0833}],120602:[.442,.194,.517,{sk:.0833}],120603:[.431,.01,.828],120604:[.711,0,.869,{sk:.16}],120605:[.686,0,.866,{sk:.0958}],120606:[.68,0,.657,{ic:.12,sk:.0958}],120607:[.711,0,.958,{sk:.192}],120608:[.68,0,.81,{sk:.0958}],120609:[.686,0,.773,{sk:.0958}],120610:[.686,0,.982,{sk:.0639}],120611:[.702,.017,.867,{sk:.0958}],120612:[.686,0,.511,{ic:.062,sk:.128}],120613:[.686,0,.971,{sk:.0639}],120614:[.711,0,.806,{sk:.192}],120615:[.686,0,1.142,{ic:.077,sk:.0958}],120616:[.686,0,.95,{ic:.077,sk:.0958}],120617:[.675,0,.841,{sk:.0958}],120618:[.703,.017,.837,{sk:.0958}],120619:[.68,0,.982,{sk:.0639}],120620:[.686,0,.723,{ic:.124,sk:.0958}],120621:[.702,.017,.867,{sk:.0958}],120622:[.686,0,.885,{sk:.0958}],120623:[.675,0,.637,{ic:.135,sk:.0958}],120624:[.703,0,.671,{ic:.131,sk:.0639}],120625:[.686,0,.767,{sk:.0958}],120626:[.686,0,.947,{sk:.0958}],120627:[.686,0,.714,{ic:.076,sk:.0639}],120628:[.703,0,.879,{sk:.0958}],120629:[.683,.033,.833],120630:[.452,.008,.761,{sk:.0319}],120631:[.701,.194,.66,{sk:.0958}],120632:[.451,.211,.59],120633:[.725,.008,.522,{sk:.0639}],120634:[.461,.017,.529,{sk:.0958}],120635:[.711,.202,.508,{sk:.0958}],120636:[.452,.211,.6,{sk:.0639}],120637:[.702,.008,.562,{sk:.0958}],120638:[.452,.008,.412,{sk:.0639}],120639:[.452,.008,.668],120640:[.694,.013,.671],120641:[.452,.211,.708,{sk:.0319}],120642:[.452,0,.577,{sk:.0319}],120643:[.711,.201,.508,{sk:.128}],120644:[.452,.008,.585,{sk:.0639}],120645:[.444,.008,.682],120646:[.451,.211,.612,{sk:.0958}],120647:[.451,.105,.424,{sk:.0958}],120648:[.444,.008,.686],120649:[.444,.013,.521,{ic:.089,sk:.0319}],120650:[.453,.008,.631,{sk:.0319}],120651:[.452,.216,.747,{sk:.0958}],120652:[.452,.201,.718,{sk:.0639}],120653:[.694,.202,.758,{sk:.128}],120654:[.453,.008,.718],120655:[.715,.022,.531,{sk:.0833}],120656:[.444,.007,.483,{sk:.0639}],120657:[.701,.008,.692,{sk:.0958}],120658:[.434,.006,.667,{ic:.067}],120659:[.694,.202,.712,{sk:.0958}],120660:[.451,.194,.612,{sk:.0958}],120661:[.444,.008,.975],120662:[.694,0,.733],120663:[.694,0,.733],120664:[.691,0,.581],120665:[.694,0,.917],120666:[.691,0,.642],120667:[.694,0,.672],120668:[.694,0,.794],120669:[.716,.022,.856],120670:[.694,0,.331],120671:[.694,0,.764],120672:[.694,0,.672],120673:[.694,0,.978],120674:[.694,0,.794],120675:[.688,0,.733],120676:[.716,.022,.794],120677:[.691,0,.794],120678:[.694,0,.703],120679:[.716,.022,.856],120680:[.694,0,.794],120681:[.688,0,.733],120682:[.715,0,.856],120683:[.694,0,.794],120684:[.694,0,.733],120685:[.694,0,.856],120686:[.716,0,.794],120687:[.683,.033,.833],120688:[.452,.008,.761,{sk:.0319}],120689:[.701,.194,.66,{sk:.0958}],120690:[.451,.211,.59],120691:[.725,.008,.522,{sk:.0639}],120692:[.461,.017,.529,{sk:.0958}],120693:[.711,.202,.508,{sk:.0958}],120694:[.452,.211,.6,{sk:.0639}],120695:[.702,.008,.562,{sk:.0958}],120696:[.452,.008,.412,{sk:.0
639}],120697:[.452,.008,.668],120698:[.694,.013,.671],120699:[.452,.211,.708,{sk:.0319}],120700:[.452,0,.577,{sk:.0319}],120701:[.711,.201,.508,{sk:.128}],120702:[.452,.008,.585,{sk:.0639}],120703:[.444,.008,.682],120704:[.451,.211,.612,{sk:.0958}],120705:[.451,.105,.424,{sk:.0958}],120706:[.444,.008,.686],120707:[.444,.013,.521,{ic:.089,sk:.0319}],120708:[.453,.008,.631,{sk:.0319}],120709:[.452,.216,.747,{sk:.0958}],120710:[.452,.201,.718,{sk:.0639}],120711:[.694,.202,.758,{sk:.128}],120712:[.453,.008,.718],120713:[.715,.022,.531,{sk:.0833}],120714:[.444,.007,.483,{sk:.0639}],120715:[.701,.008,.692,{sk:.0958}],120716:[.434,.006,.667,{ic:.067}],120717:[.694,.202,.712,{sk:.0958}],120718:[.451,.194,.612,{sk:.0958}],120719:[.444,.008,.975],120720:[.694,0,.667],120721:[.694,0,.667],120722:[.691,0,.542,{ic:.104}],120723:[.694,0,.833],120724:[.691,0,.597,{ic:.091}],120725:[.694,0,.611,{ic:.091}],120726:[.694,0,.708,{ic:.06}],120727:[.715,.022,.778],120728:[.694,0,.278,{ic:.06}],120729:[.694,0,.694,{ic:.091}],120730:[.694,0,.611],120731:[.694,0,.875,{ic:.054}],120732:[.694,0,.708,{ic:.058}],120733:[.688,0,.667,{ic:.098}],120734:[.716,.022,.736],120735:[.691,0,.708,{ic:.06}],120736:[.694,0,.639,{ic:.051}],120737:[.715,.022,.778],120738:[.694,0,.722,{ic:.091}],120739:[.688,0,.681,{ic:.109}],120740:[.716,0,.778,{ic:.065}],120741:[.694,0,.722],120742:[.694,0,.667,{ic:.091}],120743:[.694,0,.778,{ic:.076}],120744:[.716,0,.722],120745:[.683,.033,.833],120746:[.452,.008,.761,{sk:.0319}],120747:[.701,.194,.66,{sk:.0958}],120748:[.451,.211,.59],120749:[.725,.008,.522,{sk:.0639}],120750:[.461,.017,.529,{sk:.0958}],120751:[.711,.202,.508,{sk:.0958}],120752:[.452,.211,.6,{sk:.0639}],120753:[.702,.008,.562,{sk:.0958}],120754:[.452,.008,.412,{sk:.0639}],120755:[.452,.008,.668],120756:[.694,.013,.671],120757:[.452,.211,.708,{sk:.0319}],120758:[.452,0,.577,{sk:.0319}],120759:[.711,.201,.508,{sk:.128}],120760:[.452,.008,.585,{sk:.0639}],120761:[.444,.008,.682],120762:[.451,.211,.612,{sk:.0958}],120763:[.451,.105,.424,{sk:.0958}],120764:[.444,.008,.686],120765:[.444,.013,.521,{ic:.089,sk:.0319}],120766:[.453,.008,.631,{sk:.0319}],120767:[.452,.216,.747,{sk:.0958}],120768:[.452,.201,.718,{sk:.0639}],120769:[.694,.202,.758,{sk:.128}],120770:[.453,.008,.718],120771:[.715,.022,.531,{sk:.0833}],120772:[.444,.007,.483,{sk:.0639}],120773:[.701,.008,.692,{sk:.0958}],120774:[.434,.006,.667,{ic:.067}],120775:[.694,.202,.712,{sk:.0958}],120776:[.451,.194,.612,{sk:.0958}],120777:[.444,.008,.975],120778:[.68,0,.643,{ic:.106,sk:.0833}],120779:[.605,.085,.778],120782:[.654,.01,.575],120783:[.655,0,.575],120784:[.654,0,.575],120785:[.655,.011,.575],120786:[.656,0,.575],120787:[.655,.011,.575],120788:[.655,.011,.575],120789:[.676,.011,.575],120790:[.654,.011,.575],120791:[.654,.011,.575],120792:[.654,.01,.575],120793:[.655,0,.575],120794:[.654,0,.575],120795:[.655,.011,.575],120796:[.656,0,.575],120797:[.655,.011,.575],120798:[.655,.011,.575],120799:[.676,.011,.575],120800:[.654,.011,.575],120801:[.654,.011,.575],120802:[.678,.022,.5],120803:[.678,0,.5],120804:[.677,0,.5],120805:[.678,.022,.5],120806:[.656,0,.5],120807:[.656,.021,.5],120808:[.677,.022,.5],120809:[.656,.011,.5],120810:[.678,.022,.5],120811:[.677,.022,.5],120812:[.715,.022,.55],120813:[.716,0,.55],120814:[.716,0,.55],120815:[.716,.022,.55],120816:[.694,0,.55],120817:[.694,.022,.55],120818:[.716,.022,.55],120819:[.695,.011,.55],120820:[.715,.022,.55],120821:[.716,.022,.55],120822:[.621,.01,.525],120823:[.622,0,.525],120824:[.622,0,.525],120825:[.622,.011,.525],120826:[
.624,0,.525],120827:[.611,.01,.525],120828:[.622,.011,.525],120829:[.627,.01,.525],120830:[.621,.01,.525],120831:[.622,.011,.525]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(195);e.sansSerifBoldItalic=n.AddCSS(i.sansSerifBoldItalic,{32:{c:" "},33:{c:"!"},35:{c:"#"},36:{c:"$"},37:{c:"%"},38:{c:"&"},40:{c:"("},41:{c:")"},42:{c:"*"},43:{c:"+"},44:{c:","},45:{c:"-"},46:{c:"."},47:{c:"/"},48:{c:"0"},49:{c:"1"},50:{c:"2"},51:{c:"3"},52:{c:"4"},53:{c:"5"},54:{c:"6"},55:{c:"7"},56:{c:"8"},57:{c:"9"},58:{c:":"},59:{c:";"},61:{c:"="},63:{c:"?"},64:{c:"@"},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},91:{c:"["},93:{c:"]"},94:{c:"^"},95:{c:"_"},97:{c:"a"},98:{c:"b"},99:{c:"c"},100:{c:"d"},101:{c:"e"},102:{c:"f"},103:{c:"g"},104:{c:"h"},105:{c:"i"},106:{c:"j"},107:{c:"k"},108:{c:"l"},109:{c:"m"},110:{c:"n"},111:{c:"o"},112:{c:"p"},113:{c:"q"},114:{c:"r"},115:{c:"s"},116:{c:"t"},117:{c:"u"},118:{c:"v"},119:{c:"w"},120:{c:"x"},121:{c:"y"},122:{c:"z"},126:{c:"~"},913:{c:"A"},914:{c:"B"},917:{c:"E"},918:{c:"Z"},919:{c:"H"},921:{c:"I"},922:{c:"K"},924:{c:"M"},925:{c:"N"},927:{c:"O"},929:{c:"P"},930:{c:"\\398"},932:{c:"T"},935:{c:"X"},978:{c:"\\3A5"},988:{c:"F"},8213:{c:"\\2014"},8215:{c:"_"},8260:{c:"/"},8710:{c:"\\394"}})},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0}),e.sansSerifBoldItalic={32:[0,0,.25],33:[.694,0,.319],34:[.694,-.471,.5],35:[.694,.194,.833],36:[.75,.056,.5,{ic:.065}],37:[.75,.056,.833],38:[.716,.022,.758],39:[.694,-.471,.278,{ic:.057}],40:[.75,.25,.389,{ic:.102}],41:[.75,.25,.389],42:[.75,-.306,.5,{ic:.068}],43:[.583,.083,.778],44:[.098,.125,.278],45:[.259,-.186,.333],46:[.098,0,.278],47:[.75,.25,.5,{ic:.1}],48:[.678,.022,.5],49:[.678,0,.5],50:[.678,0,.5,{ic:.051}],51:[.678,.022,.5],52:[.656,0,.5],53:[.656,.022,.5,{ic:.055}],54:[.678,.022,.5],55:[.656,.011,.5,{ic:.096}],56:[.678,.022,.5,{ic:.054}],57:[.677,.022,.5],58:[.444,0,.278],59:[.444,.125,.278],61:[.37,-.13,.778],63:[.704,0,.472,{ic:.064}],64:[.705,.01,.667],65:[.694,0,.667],66:[.694,0,.667],67:[.705,.01,.639,{ic:.08}],68:[.694,0,.722],69:[.691,0,.597,{ic:.091}],70:[.691,0,.569,{ic:.104}],71:[.705,.011,.667,{ic:.063}],72:[.694,0,.708,{ic:.06}],73:[.694,0,.278,{ic:.06}],74:[.694,.022,.472,{ic:.063}],75:[.694,0,.694,{ic:.091}],76:[.694,0,.542],77:[.694,0,.875,{ic:.054}],78:[.694,0,.708,{ic:.058}],79:[.716,.022,.736],80:[.694,0,.639,{ic:.051}],81:[.716,.125,.736],82:[.694,0,.646,{ic:.052}],83:[.716,.022,.556,{ic:.053}],84:[.688,0,.681,{ic:.109}],85:[.694,.022,.688,{ic:.059}],86:[.694,0,.667,{ic:.132}],87:[.694,0,.944,{ic:.132}],88:[.694,0,.667,{ic:.091}],89:[.694,0,.667,{ic:.143}],90:[.694,0,.611,{ic:.091}],91:[.75,.25,.289,{ic:.136}],93:[.75,.25,.289,{ic:.064}],94:[.694,-.527,.5],95:[-.038,.114,.5,{ic:.065}],97:[.461,.01,.481],98:[.694,.011,.517],99:[.46,.011,.444,{ic:.055}],100:[.694,.01,.517,{ic:.071}],101:[.46,.011,.444],102:[.705,0,.306,{ic:.188}],103:[.455,.206,.5,{ic:.068}],104:[.694,0,.517],105:[.68,0,.239,{ic:.076}],106:[.68,.204,.267,{ic:.069}],107:[.694,0,.489,{ic:.054}],108:[.694,0,.239,{ic:.072}],109:[.455,0,.794],110:[.454,0,.517],111:[.461,.011,.5],112:[.455,.194,.517],113:[.455,.194,.517],114:[.455,0,.342,{ic:.082}],115:[.461,.011,.383,{ic:.053}],116:[.571,.011,.361],117:[.444,.01,.517],118:[.444,0,.46
1,{ic:.079}],119:[.444,0,.683,{ic:.079}],120:[.444,0,.461,{ic:.076}],121:[.444,.205,.461,{ic:.079}],122:[.444,0,.435,{ic:.059}],126:[.327,-.193,.5,{ic:.06}],160:[0,0,.25],305:[.444,0,.239],567:[.444,.204,.267],768:[.694,-.527,0],769:[.694,-.527,0,{ic:.063}],770:[.694,-.527,0],771:[.677,-.543,0,{ic:.06}],772:[.631,-.552,0,{ic:.064}],774:[.694,-.508,0,{ic:.073}],775:[.68,-.576,0],776:[.68,-.582,0],778:[.693,-.527,0],779:[.694,-.527,0,{ic:.063}],780:[.654,-.487,0,{ic:.06}],913:[.694,0,.667],914:[.694,0,.667],915:[.691,0,.542,{ic:.104}],916:[.694,0,.833],917:[.691,0,.597,{ic:.091}],918:[.694,0,.611,{ic:.091}],919:[.694,0,.708,{ic:.06}],920:[.715,.022,.778],921:[.694,0,.278,{ic:.06}],922:[.694,0,.694,{ic:.091}],923:[.694,0,.611],924:[.694,0,.875,{ic:.054}],925:[.694,0,.708,{ic:.058}],926:[.688,0,.667,{ic:.098}],927:[.716,.022,.736],928:[.691,0,.708,{ic:.06}],929:[.694,0,.639,{ic:.051}],930:[.715,.022,.778],931:[.694,0,.722,{ic:.091}],932:[.688,0,.681,{ic:.109}],933:[.716,0,.778,{ic:.065}],934:[.694,0,.722],935:[.694,0,.667,{ic:.091}],936:[.694,0,.778,{ic:.076}],937:[.716,0,.722],978:[.716,0,.778,{ic:.065}],988:[.691,0,.569,{ic:.104}],8211:[.312,-.236,.5,{ic:.065}],8212:[.312,-.236,1,{ic:.065}],8213:[.312,-.236,1,{ic:.065}],8215:[-.038,.114,.5,{ic:.065}],8216:[.694,-.471,.278,{ic:.058}],8217:[.694,-.471,.278,{ic:.057}],8220:[.694,-.471,.5,{ic:.114}],8221:[.694,-.471,.5],8260:[.75,.25,.5,{ic:.1}],8710:[.694,0,.833]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(197);e.sansSerifBold=n.AddCSS(i.sansSerifBold,{32:{c:" "},33:{c:"!"},35:{c:"#"},36:{c:"$"},37:{c:"%"},38:{c:"&"},40:{c:"("},41:{c:")"},42:{c:"*"},43:{c:"+"},44:{c:","},45:{c:"-"},46:{c:"."},47:{c:"/"},48:{c:"0"},49:{c:"1"},50:{c:"2"},51:{c:"3"},52:{c:"4"},53:{c:"5"},54:{c:"6"},55:{c:"7"},56:{c:"8"},57:{c:"9"},58:{c:":"},59:{c:";"},61:{c:"="},63:{c:"?"},64:{c:"@"},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},91:{c:"["},93:{c:"]"},94:{c:"^"},95:{c:"_"},97:{c:"a"},98:{c:"b"},99:{c:"c"},100:{c:"d"},101:{c:"e"},102:{c:"f"},103:{c:"g"},104:{c:"h"},105:{c:"i"},106:{c:"j"},107:{c:"k"},108:{c:"l"},109:{c:"m"},110:{c:"n"},111:{c:"o"},112:{c:"p"},113:{c:"q"},114:{c:"r"},115:{c:"s"},116:{c:"t"},117:{c:"u"},118:{c:"v"},119:{c:"w"},120:{c:"x"},121:{c:"y"},122:{c:"z"},126:{c:"~"},913:{c:"A"},914:{c:"B"},917:{c:"E"},918:{c:"Z"},919:{c:"H"},921:{c:"I"},922:{c:"K"},924:{c:"M"},925:{c:"N"},927:{c:"O"},929:{c:"P"},930:{c:"\\398"},932:{c:"T"},935:{c:"X"},978:{c:"\\3A5"},988:{c:"F"},8213:{c:"\\2014"},8215:{c:"_"},8260:{c:"/"},8710:{c:"\\394"}})},function(t,e,r){"use 
strict";Object.defineProperty(e,"__esModule",{value:!0}),e.sansSerifBold={32:[0,0,.25],33:[.694,0,.367],34:[.694,-.442,.558],35:[.694,.193,.917],36:[.75,.056,.55],37:[.75,.056,1.029],38:[.716,.022,.831],39:[.694,-.442,.306],40:[.75,.249,.428],41:[.75,.25,.428],42:[.75,-.293,.55],43:[.617,.116,.856],44:[.146,.106,.306],45:[.273,-.186,.367],46:[.146,0,.306],47:[.75,.249,.55],48:[.715,.022,.55],49:[.716,0,.55],50:[.716,0,.55],51:[.716,.022,.55],52:[.694,0,.55],53:[.694,.022,.55],54:[.716,.022,.55],55:[.695,.011,.55],56:[.715,.022,.55],57:[.716,.022,.55],58:[.458,0,.306],59:[.458,.106,.306],61:[.407,-.094,.856],63:[.705,0,.519],64:[.704,.011,.733],65:[.694,0,.733],66:[.694,0,.733],67:[.704,.011,.703],68:[.694,0,.794],69:[.691,0,.642],70:[.691,0,.611],71:[.705,.011,.733],72:[.694,0,.794],73:[.694,0,.331],74:[.694,.022,.519],75:[.694,0,.764],76:[.694,0,.581],77:[.694,0,.978],78:[.694,0,.794],79:[.716,.022,.794],80:[.694,0,.703],81:[.716,.106,.794],82:[.694,0,.703],83:[.716,.022,.611],84:[.688,0,.733],85:[.694,.022,.764],86:[.694,0,.733],87:[.694,0,1.039],88:[.694,0,.733],89:[.694,0,.733],90:[.694,0,.672],91:[.75,.25,.343],93:[.75,.25,.343],94:[.694,-.537,.55],95:[-.023,.11,.55],97:[.475,.011,.525],98:[.694,.01,.561],99:[.475,.011,.489],100:[.694,.011,.561],101:[.474,.01,.511],102:[.705,0,.336],103:[.469,.206,.55],104:[.694,0,.561],105:[.695,0,.256],106:[.695,.205,.286],107:[.694,0,.531],108:[.694,0,.256],109:[.469,0,.867],110:[.468,0,.561],111:[.474,.011,.55],112:[.469,.194,.561],113:[.469,.194,.561],114:[.469,0,.372],115:[.474,.01,.422],116:[.589,.01,.404],117:[.458,.011,.561],118:[.458,0,.5],119:[.458,0,.744],120:[.458,0,.5],121:[.458,.205,.5],122:[.458,0,.476],126:[.344,-.198,.55],160:[0,0,.25],305:[.458,0,.256],567:[.458,.205,.286],768:[.694,-.537,0],769:[.694,-.537,0],770:[.694,-.537,0],771:[.694,-.548,0],772:[.66,-.56,0],774:[.694,-.552,0],775:[.695,-.596,0],776:[.695,-.595,0],778:[.694,-.538,0],779:[.694,-.537,0],780:[.657,-.5,0],913:[.694,0,.733],914:[.694,0,.733],915:[.691,0,.581],916:[.694,0,.917],917:[.691,0,.642],918:[.694,0,.672],919:[.694,0,.794],920:[.716,.022,.856],921:[.694,0,.331],922:[.694,0,.764],923:[.694,0,.672],924:[.694,0,.978],925:[.694,0,.794],926:[.688,0,.733],927:[.716,.022,.794],928:[.691,0,.794],929:[.694,0,.703],930:[.716,.022,.856],931:[.694,0,.794],932:[.688,0,.733],933:[.715,0,.856],934:[.694,0,.794],935:[.694,0,.733],936:[.694,0,.856],937:[.716,0,.794],978:[.715,0,.856],988:[.691,0,.611],8211:[.327,-.24,.55],8212:[.327,-.24,1.1],8213:[.327,-.24,1.1],8215:[-.023,.11,.55],8216:[.694,-.443,.306],8217:[.694,-.442,.306],8220:[.694,-.443,.558],8221:[.694,-.442,.558],8260:[.75,.249,.55],8710:[.694,0,.917]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(199);e.sansSerifItalic=n.AddCSS(i.sansSerifItalic,{32:{c:" 
"},33:{c:"!"},35:{c:"#"},36:{c:"$"},37:{c:"%"},38:{c:"&"},40:{c:"("},41:{c:")"},42:{c:"*"},43:{c:"+"},44:{c:","},45:{c:"-"},46:{c:"."},47:{c:"/"},48:{c:"0"},49:{c:"1"},50:{c:"2"},51:{c:"3"},52:{c:"4"},53:{c:"5"},54:{c:"6"},55:{c:"7"},56:{c:"8"},57:{c:"9"},58:{c:":"},59:{c:";"},61:{c:"="},63:{c:"?"},64:{c:"@"},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},91:{c:"["},93:{c:"]"},94:{c:"^"},95:{c:"_"},97:{c:"a"},98:{c:"b"},99:{c:"c"},100:{c:"d"},101:{c:"e"},102:{c:"f"},103:{c:"g"},104:{c:"h"},105:{c:"i"},106:{c:"j"},107:{c:"k"},108:{c:"l"},109:{c:"m"},110:{c:"n"},111:{c:"o"},112:{c:"p"},113:{c:"q"},114:{c:"r"},115:{c:"s"},116:{c:"t"},117:{c:"u"},118:{c:"v"},119:{c:"w"},120:{c:"x"},121:{c:"y"},122:{c:"z"},126:{c:"~"},913:{c:"A"},914:{c:"B"},917:{c:"E"},918:{c:"Z"},919:{c:"H"},921:{c:"I"},922:{c:"K"},924:{c:"M"},925:{c:"N"},927:{c:"O"},929:{c:"P"},930:{c:"\\398"},932:{c:"T"},935:{c:"X"},978:{c:"\\3A5"},988:{c:"F"},8213:{c:"\\2014"},8215:{c:"_"},8260:{c:"/"},8710:{c:"\\394"}})},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0}),e.sansSerifItalic={32:[0,0,.25],33:[.694,0,.319],34:[.694,-.471,.5],35:[.694,.194,.833],36:[.75,.056,.5,{ic:.065}],37:[.75,.056,.833],38:[.716,.022,.758],39:[.694,-.471,.278,{ic:.057}],40:[.75,.25,.389,{ic:.102}],41:[.75,.25,.389],42:[.75,-.306,.5,{ic:.068}],43:[.583,.083,.778],44:[.098,.125,.278],45:[.259,-.186,.333],46:[.098,0,.278],47:[.75,.25,.5,{ic:.1}],48:[.678,.022,.5],49:[.678,0,.5],50:[.678,0,.5,{ic:.051}],51:[.678,.022,.5],52:[.656,0,.5],53:[.656,.022,.5,{ic:.055}],54:[.678,.022,.5],55:[.656,.011,.5,{ic:.096}],56:[.678,.022,.5,{ic:.054}],57:[.677,.022,.5],58:[.444,0,.278],59:[.444,.125,.278],61:[.37,-.13,.778],63:[.704,0,.472,{ic:.064}],64:[.705,.01,.667],65:[.694,0,.667],66:[.694,0,.667],67:[.705,.01,.639,{ic:.08}],68:[.694,0,.722],69:[.691,0,.597,{ic:.091}],70:[.691,0,.569,{ic:.104}],71:[.705,.011,.667,{ic:.063}],72:[.694,0,.708,{ic:.06}],73:[.694,0,.278,{ic:.06}],74:[.694,.022,.472,{ic:.063}],75:[.694,0,.694,{ic:.091}],76:[.694,0,.542],77:[.694,0,.875,{ic:.054}],78:[.694,0,.708,{ic:.058}],79:[.716,.022,.736],80:[.694,0,.639,{ic:.051}],81:[.716,.125,.736],82:[.694,0,.646,{ic:.052}],83:[.716,.022,.556,{ic:.053}],84:[.688,0,.681,{ic:.109}],85:[.694,.022,.688,{ic:.059}],86:[.694,0,.667,{ic:.132}],87:[.694,0,.944,{ic:.132}],88:[.694,0,.667,{ic:.091}],89:[.694,0,.667,{ic:.143}],90:[.694,0,.611,{ic:.091}],91:[.75,.25,.289,{ic:.136}],93:[.75,.25,.289,{ic:.064}],94:[.694,-.527,.5],95:[-.038,.114,.5,{ic:.065}],97:[.461,.01,.481],98:[.694,.011,.517],99:[.46,.011,.444,{ic:.055}],100:[.694,.01,.517,{ic:.071}],101:[.46,.011,.444],102:[.705,0,.306,{ic:.188}],103:[.455,.206,.5,{ic:.068}],104:[.694,0,.517],105:[.68,0,.239,{ic:.076}],106:[.68,.204,.267,{ic:.069}],107:[.694,0,.489,{ic:.054}],108:[.694,0,.239,{ic:.072}],109:[.455,0,.794],110:[.454,0,.517],111:[.461,.011,.5],112:[.455,.194,.517],113:[.455,.194,.517],114:[.455,0,.342,{ic:.082}],115:[.461,.011,.383,{ic:.053}],116:[.571,.011,.361],117:[.444,.01,.517],118:[.444,0,.461,{ic:.079}],119:[.444,0,.683,{ic:.079}],120:[.444,0,.461,{ic:.076}],121:[.444,.205,.461,{ic:.079}],122:[.444,0,.435,{ic:.059}],126:[.327,-.193,.5,{ic:.06}],160:[0,0,.25],305:[.444,0,.239],567:[.444,.204,.267],768:[.694,-.527,0],769:[.694,-.527,0,{ic:.063}],770:[.694,-.527,0],771:[.677,-.543,0,{
ic:.06}],772:[.631,-.552,0,{ic:.064}],774:[.694,-.508,0,{ic:.073}],775:[.68,-.576,0],776:[.68,-.582,0],778:[.693,-.527,0],779:[.694,-.527,0,{ic:.063}],780:[.654,-.487,0,{ic:.06}],913:[.694,0,.667],914:[.694,0,.667],915:[.691,0,.542,{ic:.104}],916:[.694,0,.833],917:[.691,0,.597,{ic:.091}],918:[.694,0,.611,{ic:.091}],919:[.694,0,.708,{ic:.06}],920:[.715,.022,.778],921:[.694,0,.278,{ic:.06}],922:[.694,0,.694,{ic:.091}],923:[.694,0,.611],924:[.694,0,.875,{ic:.054}],925:[.694,0,.708,{ic:.058}],926:[.688,0,.667,{ic:.098}],927:[.716,.022,.736],928:[.691,0,.708,{ic:.06}],929:[.694,0,.639,{ic:.051}],930:[.715,.022,.778],931:[.694,0,.722,{ic:.091}],932:[.688,0,.681,{ic:.109}],933:[.716,0,.778,{ic:.065}],934:[.694,0,.722],935:[.694,0,.667,{ic:.091}],936:[.694,0,.778,{ic:.076}],937:[.716,0,.722],978:[.716,0,.778,{ic:.065}],988:[.691,0,.569,{ic:.104}],8211:[.312,-.236,.5,{ic:.065}],8212:[.312,-.236,1,{ic:.065}],8213:[.312,-.236,1,{ic:.065}],8215:[-.038,.114,.5,{ic:.065}],8216:[.694,-.471,.278,{ic:.058}],8217:[.694,-.471,.278,{ic:.057}],8220:[.694,-.471,.5,{ic:.114}],8221:[.694,-.471,.5],8260:[.75,.25,.5,{ic:.1}],8710:[.694,0,.833]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(201);e.sansSerif=n.AddCSS(i.sansSerif,{32:{c:" "},33:{c:"!"},35:{c:"#"},36:{c:"$"},37:{c:"%"},38:{c:"&"},40:{c:"("},41:{c:")"},42:{c:"*"},43:{c:"+"},44:{c:","},45:{c:"-"},46:{c:"."},47:{c:"/"},48:{c:"0"},49:{c:"1"},50:{c:"2"},51:{c:"3"},52:{c:"4"},53:{c:"5"},54:{c:"6"},55:{c:"7"},56:{c:"8"},57:{c:"9"},58:{c:":"},59:{c:";"},61:{c:"="},63:{c:"?"},64:{c:"@"},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},91:{c:"["},93:{c:"]"},94:{c:"^"},95:{c:"_"},97:{c:"a"},98:{c:"b"},99:{c:"c"},100:{c:"d"},101:{c:"e"},102:{c:"f"},103:{c:"g"},104:{c:"h"},105:{c:"i"},106:{c:"j"},107:{c:"k"},108:{c:"l"},109:{c:"m"},110:{c:"n"},111:{c:"o"},112:{c:"p"},113:{c:"q"},114:{c:"r"},115:{c:"s"},116:{c:"t"},117:{c:"u"},118:{c:"v"},119:{c:"w"},120:{c:"x"},121:{c:"y"},122:{c:"z"},126:{c:"~"},913:{c:"A"},914:{c:"B"},917:{c:"E"},918:{c:"Z"},919:{c:"H"},921:{c:"I"},922:{c:"K"},924:{c:"M"},925:{c:"N"},927:{c:"O"},929:{c:"P"},930:{c:"\\398"},932:{c:"T"},935:{c:"X"},978:{c:"\\3A5"},988:{c:"F"},8213:{c:"\\2014"},8215:{c:"_"},8260:{c:"/"},8710:{c:"\\394"}})},function(t,e,r){"use 
strict";Object.defineProperty(e,"__esModule",{value:!0}),e.sansSerif={32:[0,0,.25],33:[.694,0,.319],34:[.694,-.471,.5],35:[.694,.194,.833],36:[.75,.056,.5],37:[.75,.056,.833],38:[.716,.022,.758],39:[.694,-.471,.278],40:[.75,.25,.389],41:[.75,.25,.389],42:[.75,-.306,.5],43:[.583,.082,.778],44:[.098,.125,.278],45:[.259,-.186,.333],46:[.098,0,.278],47:[.75,.25,.5],48:[.678,.022,.5],49:[.678,0,.5],50:[.677,0,.5],51:[.678,.022,.5],52:[.656,0,.5],53:[.656,.021,.5],54:[.677,.022,.5],55:[.656,.011,.5],56:[.678,.022,.5],57:[.677,.022,.5],58:[.444,0,.278],59:[.444,.125,.278],61:[.37,-.13,.778],63:[.704,0,.472],64:[.704,.011,.667],65:[.694,0,.667],66:[.694,0,.667],67:[.705,.011,.639],68:[.694,0,.722],69:[.691,0,.597],70:[.691,0,.569],71:[.704,.011,.667],72:[.694,0,.708],73:[.694,0,.278],74:[.694,.022,.472],75:[.694,0,.694],76:[.694,0,.542],77:[.694,0,.875],78:[.694,0,.708],79:[.715,.022,.736],80:[.694,0,.639],81:[.715,.125,.736],82:[.694,0,.646],83:[.716,.022,.556],84:[.688,0,.681],85:[.694,.022,.688],86:[.694,0,.667],87:[.694,0,.944],88:[.694,0,.667],89:[.694,0,.667],90:[.694,0,.611],91:[.75,.25,.289],93:[.75,.25,.289],94:[.694,-.527,.5],95:[-.038,.114,.5],97:[.46,.01,.481],98:[.694,.011,.517],99:[.46,.01,.444],100:[.694,.01,.517],101:[.461,.01,.444],102:[.705,0,.306],103:[.455,.206,.5],104:[.694,0,.517],105:[.68,0,.239],106:[.68,.205,.267],107:[.694,0,.489],108:[.694,0,.239],109:[.455,0,.794],110:[.455,0,.517],111:[.46,.01,.5],112:[.455,.194,.517],113:[.455,.194,.517],114:[.455,0,.342],115:[.46,.01,.383],116:[.571,.01,.361],117:[.444,.01,.517],118:[.444,0,.461],119:[.444,0,.683],120:[.444,0,.461],121:[.444,.204,.461],122:[.444,0,.435],126:[.327,-.193,.5],160:[0,0,.25],305:[.444,0,.239],567:[.444,.205,.267],768:[.694,-.527,0],769:[.694,-.527,0],770:[.694,-.527,0],771:[.677,-.543,0],772:[.631,-.552,0],774:[.694,-.508,0],775:[.68,-.576,0],776:[.68,-.582,0],778:[.694,-.527,0],779:[.694,-.527,0],780:[.654,-.487,0],913:[.694,0,.667],914:[.694,0,.667],915:[.691,0,.542],916:[.694,0,.833],917:[.691,0,.597],918:[.694,0,.611],919:[.694,0,.708],920:[.716,.021,.778],921:[.694,0,.278],922:[.694,0,.694],923:[.694,0,.611],924:[.694,0,.875],925:[.694,0,.708],926:[.688,0,.667],927:[.715,.022,.736],928:[.691,0,.708],929:[.694,0,.639],930:[.716,.021,.778],931:[.694,0,.722],932:[.688,0,.681],933:[.716,0,.778],934:[.694,0,.722],935:[.694,0,.667],936:[.694,0,.778],937:[.716,0,.722],978:[.716,0,.778],988:[.691,0,.569],8211:[.312,-.236,.5],8212:[.312,-.236,1],8213:[.312,-.236,1],8215:[-.038,.114,.5],8216:[.694,-.471,.278],8217:[.694,-.471,.278],8220:[.694,-.471,.5],8221:[.694,-.471,.5],8260:[.75,.25,.5],8710:[.694,0,.833]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(203);e.scriptBold=n.AddCSS(i.scriptBold,{32:{c:" "},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},913:{c:"A",f:"B"},914:{c:"B",f:"B"},917:{c:"E",f:"B"},918:{c:"Z",f:"B"},919:{c:"H",f:"B"},921:{c:"I",f:"B"},922:{c:"K",f:"B"},924:{c:"M",f:"B"},925:{c:"N",f:"B"},927:{c:"O",f:"B"},929:{c:"P",f:"B"},930:{c:"\\398",f:"B"},932:{c:"T",f:"B"},935:{c:"X",f:"B"},978:{c:"\\3A5",f:"B"},988:{c:"F",f:"B"}})},function(t,e,r){"use 
strict";Object.defineProperty(e,"__esModule",{value:!0}),e.scriptBold={32:[0,0,.25],65:[.717,.008,.803,{ic:.213,sk:.389}],66:[.708,.028,.908,{sk:.194}],67:[.728,.026,.666,{ic:.153,sk:.278}],68:[.708,.031,.774,{ic:.081,sk:.111}],69:[.707,.008,.562,{ic:.156,sk:.139}],70:[.735,.036,.895,{ic:.095,sk:.222}],71:[.717,.037,.61,{ic:.128,sk:.25}],72:[.717,.036,.969,{ic:.272,sk:.333}],73:[.717,.017,.809,{ic:.137,sk:.333}],74:[.717,.314,1.052,{ic:.081,sk:.417}],75:[.717,.037,.914,{ic:.29,sk:.361}],76:[.717,.017,.874,{ic:.161,sk:.306}],77:[.721,.05,1.08,{ic:.136,sk:.444}],78:[.726,.036,.902,{ic:.306,sk:.389}],79:[.707,.008,.738,{ic:.067,sk:.167}],80:[.716,.037,1.013,{sk:.222}],81:[.717,.017,.883,{sk:.278}],82:[.717,.017,.85,{sk:.194}],83:[.708,.036,.868,{ic:.148,sk:.333}],84:[.735,.037,.747,{ic:.249,sk:.222}],85:[.717,.017,.8,{ic:.16,sk:.25}],86:[.717,.017,.622,{ic:.228,sk:.222}],87:[.717,.017,.805,{ic:.221,sk:.25}],88:[.717,.017,.944,{ic:.187,sk:.278}],89:[.716,.017,.71,{ic:.249,sk:.194}],90:[.717,.016,.821,{ic:.211,sk:.306}],160:[0,0,.25],913:[.698,0,.869],914:[.686,0,.818],917:[.68,0,.756],918:[.686,0,.703],919:[.686,0,.9],921:[.686,0,.436],922:[.686,0,.901],924:[.686,0,1.092],925:[.686,0,.9],927:[.696,.01,.864],929:[.686,0,.786],930:[.696,.01,.894],932:[.675,0,.8],935:[.686,0,.869],978:[.697,0,.894],988:[.68,0,.724]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(205);e.script=n.AddCSS(i.script,{32:{c:" "},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},913:{c:"A",f:""},914:{c:"B",f:""},917:{c:"E",f:""},918:{c:"Z",f:""},919:{c:"H",f:""},921:{c:"I",f:""},922:{c:"K",f:""},924:{c:"M",f:""},925:{c:"N",f:""},927:{c:"O",f:""},929:{c:"P",f:""},930:{c:"\\398",f:""},932:{c:"T",f:""},935:{c:"X",f:""},978:{c:"\\3A5",f:""},988:{c:"F",f:""},8459:{c:"H",f:"SC"},8464:{c:"J",f:"SC"},8466:{c:"L",f:"SC"},8475:{c:"R",f:"SC"},8492:{c:"B",f:"SC"},8496:{c:"E",f:"SC"},8497:{c:"F",f:"SC"},8499:{c:"M",f:"SC"}})},function(t,e,r){"use 
strict";Object.defineProperty(e,"__esModule",{value:!0}),e.script={32:[0,0,.25],65:[.717,.008,.803,{ic:.213,sk:.389}],66:[.708,.028,.908,{sk:.194}],67:[.728,.026,.666,{ic:.153,sk:.278}],68:[.708,.031,.774,{ic:.081,sk:.111}],69:[.707,.008,.562,{ic:.156,sk:.139}],70:[.735,.036,.895,{ic:.095,sk:.222}],71:[.717,.037,.61,{ic:.128,sk:.25}],72:[.717,.036,.969,{ic:.272,sk:.333}],73:[.717,.017,.809,{ic:.137,sk:.333}],74:[.717,.314,1.052,{ic:.081,sk:.417}],75:[.717,.037,.914,{ic:.29,sk:.361}],76:[.717,.017,.874,{ic:.161,sk:.306}],77:[.721,.05,1.08,{ic:.136,sk:.444}],78:[.726,.036,.902,{ic:.306,sk:.389}],79:[.707,.008,.738,{ic:.067,sk:.167}],80:[.716,.037,1.013,{sk:.222}],81:[.717,.017,.883,{sk:.278}],82:[.717,.017,.85,{sk:.194}],83:[.708,.036,.868,{ic:.148,sk:.333}],84:[.735,.037,.747,{ic:.249,sk:.222}],85:[.717,.017,.8,{ic:.16,sk:.25}],86:[.717,.017,.622,{ic:.228,sk:.222}],87:[.717,.017,.805,{ic:.221,sk:.25}],88:[.717,.017,.944,{ic:.187,sk:.278}],89:[.716,.017,.71,{ic:.249,sk:.194}],90:[.717,.016,.821,{ic:.211,sk:.306}],160:[0,0,.25],913:[.716,0,.75],914:[.683,0,.708],917:[.68,0,.681],918:[.683,0,.611],919:[.683,0,.75],921:[.683,0,.361],922:[.683,0,.778],924:[.683,0,.917],925:[.683,0,.75],927:[.705,.022,.778],929:[.683,0,.681],930:[.705,.022,.778],932:[.677,0,.722],935:[.683,0,.75],978:[.705,0,.778],988:[.68,0,.653],8459:[.717,.036,.969,{ic:.272,sk:.333}],8464:[.717,.314,1.052,{ic:.081,sk:.417}],8466:[.717,.017,.874,{ic:.161,sk:.306}],8475:[.717,.017,.85,{sk:.194}],8492:[.708,.028,.908,{sk:.194}],8496:[.707,.008,.562,{ic:.156,sk:.139}],8497:[.735,.036,.895,{ic:.095,sk:.222}],8499:[.721,.05,1.08,{ic:.136,sk:.444}]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(207);e.smallop=n.AddCSS(i.smallop,{32:{c:" "},40:{c:"("},41:{c:")"},47:{c:"/"},91:{c:"["},93:{c:"]"},123:{c:"{"},125:{c:"}"},8260:{c:"/"},9001:{c:"\\27E8"},9002:{c:"\\27E9"},10072:{c:"\\2223"},10764:{c:"\\222C\\222C"},12296:{c:"\\27E8"},12297:{c:"\\27E9"}})},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0}),e.smallop={32:[0,0,.25],40:[.85,.349,.458],41:[.85,.349,.458],47:[.85,.349,.578],91:[.85,.349,.417],92:[.85,.349,.578],93:[.85,.349,.417],123:[.85,.349,.583],125:[.85,.349,.583],160:[0,0,.25],710:[.744,-.551,.556],732:[.722,-.597,.556],770:[.744,-.551,0],771:[.722,-.597,0],8214:[.602,0,.778],8260:[.85,.349,.578],8593:[.6,0,.667],8595:[.6,0,.667],8657:[.599,0,.778],8659:[.6,0,.778],8719:[.75,.25,.944],8720:[.75,.25,.944],8721:[.75,.25,1.056],8730:[.85,.35,1],8739:[.627,.015,.333],8741:[.627,.015,.556],8747:[.805,.306,.472,{ic:.138}],8748:[.805,.306,.819,{ic:.138}],8749:[.805,.306,1.166,{ic:.138}],8750:[.805,.306,.472,{ic:.138}],8896:[.75,.249,.833],8897:[.75,.249,.833],8898:[.75,.249,.833],8899:[.75,.249,.833],8968:[.85,.349,.472],8969:[.85,.349,.472],8970:[.85,.349,.472],8971:[.85,.349,.472],9001:[.85,.35,.472],9002:[.85,.35,.472],9168:[.602,0,.667],10072:[.627,.015,.333],10216:[.85,.35,.472],10217:[.85,.35,.472],10752:[.75,.25,1.111],10753:[.75,.25,1.111],10754:[.75,.25,1.111],10756:[.75,.249,.833],10758:[.75,.249,.833],10764:[.805,.306,1.638,{ic:.138}],12296:[.85,.35,.472],12297:[.85,.35,.472]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(209);e.texCalligraphicBold=n.AddCSS(i.texCalligraphicBold,{32:{c:" 
"},47:{c:"/"},48:{c:"0"},49:{c:"1"},50:{c:"2"},51:{c:"3"},52:{c:"4"},53:{c:"5"},54:{c:"6"},55:{c:"7"},56:{c:"8"},57:{c:"9"},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},97:{c:"a"},98:{c:"b"},99:{c:"c"},100:{c:"d"},101:{c:"e"},102:{c:"f"},103:{c:"g"},104:{c:"h"},105:{c:"i"},106:{c:"j"},107:{c:"k"},108:{c:"l"},109:{c:"m"},110:{c:"n"},111:{c:"o"},112:{c:"p"},113:{c:"q"},114:{c:"r"},115:{c:"s"},116:{c:"t"},117:{c:"u"},118:{c:"v"},119:{c:"w"},120:{c:"x"},121:{c:"y"},122:{c:"z"},913:{c:"A",f:"BI"},914:{c:"B",f:"BI"},917:{c:"E",f:"BI"},918:{c:"Z",f:"BI"},919:{c:"H",f:"BI"},921:{c:"I",f:"BI"},922:{c:"K",f:"BI"},924:{c:"M",f:"BI"},925:{c:"N",f:"BI"},927:{c:"O",f:"BI"},929:{c:"P",f:"BI"},930:{c:"\\398",f:"BI"},932:{c:"T",f:"BI"},935:{c:"X",f:"BI"},978:{c:"\\3A5",f:"BI"},988:{c:"F",f:"BI"},8260:{c:"/"},8710:{c:"\\394"}})},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0}),e.texCalligraphicBold={32:[0,0,.25],47:[.711,.21,.894],48:[.46,.017,.575],49:[.461,0,.575],50:[.46,0,.575],51:[.461,.211,.575],52:[.469,.194,.575],53:[.461,.211,.575],54:[.66,.017,.575],55:[.476,.211,.575],56:[.661,.017,.575],57:[.461,.21,.575],65:[.751,.049,.921,{ic:.068,sk:.224}],66:[.705,.017,.748,{sk:.16}],67:[.703,.02,.613,{sk:.16}],68:[.686,0,.892,{sk:.0958}],69:[.703,.016,.607,{sk:.128}],70:[.686,.03,.814,{ic:.116,sk:.128}],71:[.703,.113,.682,{sk:.128}],72:[.686,.048,.987,{sk:.128}],73:[.686,0,.642,{ic:.104,sk:.0319}],74:[.686,.114,.779,{ic:.158,sk:.192}],75:[.703,.017,.871,{sk:.0639}],76:[.703,.017,.788,{sk:.16}],77:[.703,.049,1.378,{sk:.16}],78:[.84,.049,.937,{ic:.168,sk:.0958}],79:[.703,.017,.906,{sk:.128}],80:[.686,.067,.81,{sk:.0958}],81:[.703,.146,.939,{sk:.128}],82:[.686,.017,.99,{sk:.0958}],83:[.703,.016,.696,{sk:.16}],84:[.72,.069,.644,{ic:.303,sk:.0319}],85:[.686,.024,.715,{ic:.056,sk:.0958}],86:[.686,.077,.737,{sk:.0319}],87:[.686,.077,1.169,{sk:.0958}],88:[.686,0,.817,{ic:.089,sk:.16}],89:[.686,.164,.759,{sk:.0958}],90:[.686,0,.818,{sk:.16}],97:[.452,.008,.633],98:[.694,.008,.521],99:[.451,.008,.513,{sk:.0639}],100:[.694,.008,.61,{sk:.192}],101:[.452,.008,.554,{sk:.0639}],102:[.701,.201,.568,{ic:.056,sk:.192}],103:[.452,.202,.545,{sk:.0319}],104:[.694,.008,.668,{sk:-.0319}],105:[.694,.008,.405],106:[.694,.202,.471],107:[.694,.008,.604],108:[.694,.008,.348,{sk:.0958}],109:[.452,.008,1.032],110:[.452,.008,.713],111:[.452,.008,.585,{sk:.0639}],112:[.452,.194,.601,{sk:.0958}],113:[.452,.194,.542,{sk:.0958}],114:[.452,.008,.529,{sk:.0639}],115:[.451,.008,.531,{sk:.0639}],116:[.643,.007,.415,{sk:.0958}],117:[.452,.008,.681,{sk:.0319}],118:[.453,.008,.567,{sk:.0319}],119:[.453,.008,.831,{sk:.0958}],120:[.452,.008,.659,{sk:.0319}],121:[.452,.202,.59,{sk:.0639}],122:[.452,.008,.555,{sk:.0639}],160:[0,0,.25],913:[.711,0,.869,{sk:.16}],914:[.686,0,.866,{sk:.0958}],915:[.68,0,.657,{ic:.12,sk:.0958}],916:[.711,0,.958,{sk:.192}],917:[.68,0,.81,{sk:.0958}],918:[.686,0,.773,{sk:.0958}],919:[.686,0,.982,{sk:.0639}],920:[.702,.017,.867,{sk:.0958}],921:[.686,0,.511,{ic:.062,sk:.128}],922:[.686,0,.971,{sk:.0639}],923:[.711,0,.806,{sk:.192}],924:[.686,0,1.142,{ic:.077,sk:.0958}],925:[.686,0,.95,{ic:.077,sk:.0958}],926:[.675,0,.841,{sk:.0958}],927:[.703,.017,.837,{sk:.0958}],928:[.68,0,.982,{sk:.0639}],929:[.686,0,.723,{ic:.124,sk:.0958}],930:[.702,.017,.867,{sk:.0958}],931
:[.686,0,.885,{sk:.0958}],932:[.675,0,.637,{ic:.135,sk:.0958}],933:[.703,0,.671,{ic:.131,sk:.0639}],934:[.686,0,.767,{sk:.0958}],935:[.686,0,.947,{sk:.0958}],936:[.686,0,.714,{ic:.076,sk:.0639}],937:[.703,0,.879,{sk:.0958}],945:[.452,.008,.761,{sk:.0319}],946:[.701,.194,.66,{sk:.0958}],947:[.451,.211,.59],948:[.725,.008,.522,{sk:.0639}],949:[.461,.017,.529,{sk:.0958}],950:[.711,.202,.508,{sk:.0958}],951:[.452,.211,.6,{sk:.0639}],952:[.702,.008,.562,{sk:.0958}],953:[.452,.008,.412,{sk:.0639}],954:[.452,.008,.668],955:[.694,.013,.671],956:[.452,.211,.708,{sk:.0319}],957:[.452,0,.577,{sk:.0319}],958:[.711,.201,.508,{sk:.128}],959:[.452,.008,.585,{sk:.0639}],960:[.444,.008,.682],961:[.451,.211,.612,{sk:.0958}],962:[.451,.105,.424,{sk:.0958}],963:[.444,.008,.686],964:[.444,.013,.521,{ic:.089,sk:.0319}],965:[.453,.008,.631,{sk:.0319}],966:[.452,.216,.747,{sk:.0958}],967:[.452,.201,.718,{sk:.0639}],968:[.694,.202,.758,{sk:.128}],969:[.453,.008,.718],977:[.701,.008,.692,{sk:.0958}],978:[.703,0,.671,{ic:.131,sk:.0639}],981:[.694,.202,.712,{sk:.0958}],982:[.444,.008,.975],988:[.68,0,.689,{ic:.12,sk:.0958}],1009:[.451,.194,.612,{sk:.0958}],1013:[.444,.007,.483,{sk:.0639}],8260:[.711,.21,.894],8710:[.711,0,.958,{sk:.192}]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(211);e.texCalligraphic=n.AddCSS(i.texCalligraphic,{32:{c:" "},48:{c:"0"},49:{c:"1"},50:{c:"2"},51:{c:"3"},52:{c:"4"},53:{c:"5"},54:{c:"6"},55:{c:"7"},56:{c:"8"},57:{c:"9"},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},913:{c:"A",f:"I"},914:{c:"B",f:"I"},917:{c:"E",f:"I"},918:{c:"Z",f:"I"},919:{c:"H",f:"I"},921:{c:"I",f:"I"},922:{c:"K",f:"I"},924:{c:"M",f:"I"},925:{c:"N",f:"I"},927:{c:"O",f:"I"},929:{c:"P",f:"I"},930:{c:"\\398",f:"I"},932:{c:"T",f:"I"},935:{c:"X",f:"I"},978:{c:"\\3A5",f:"I"},988:{c:"F",f:"I"}})},function(t,e,r){"use 
strict";Object.defineProperty(e,"__esModule",{value:!0}),e.texCalligraphic={32:[0,0,.25],48:[.452,.022,.5],49:[.453,0,.5],50:[.453,0,.5],51:[.452,.216,.5],52:[.464,.194,.5],53:[.453,.216,.5],54:[.665,.022,.5],55:[.463,.216,.5],56:[.666,.021,.5],57:[.453,.216,.5],65:[.728,.05,.798,{sk:.194}],66:[.705,.022,.657,{sk:.139}],67:[.705,.025,.527,{sk:.139}],68:[.683,0,.771,{sk:.0833}],69:[.705,.022,.528,{sk:.111}],70:[.683,.032,.719,{ic:.11,sk:.111}],71:[.704,.119,.595,{sk:.111}],72:[.683,.048,.845,{sk:.111}],73:[.683,0,.545,{ic:.097,sk:.0278}],74:[.683,.119,.678,{ic:.161,sk:.167}],75:[.705,.022,.762,{sk:.0556}],76:[.705,.022,.69,{sk:.139}],77:[.705,.05,1.201,{sk:.139}],78:[.789,.05,.82,{ic:.159,sk:.0833}],79:[.705,.022,.796,{sk:.111}],80:[.683,.057,.696,{sk:.0833}],81:[.705,.131,.817,{sk:.111}],82:[.682,.022,.848,{sk:.0833}],83:[.705,.022,.606,{sk:.139}],84:[.717,.068,.545,{ic:.288,sk:.0278}],85:[.683,.028,.626,{ic:.061,sk:.0833}],86:[.683,.052,.613,{sk:.0278}],87:[.683,.053,.988,{sk:.0833}],88:[.683,0,.713,{ic:.094,sk:.139}],89:[.683,.143,.668,{sk:.0833}],90:[.683,0,.725,{sk:.139}],160:[0,0,.25],913:[.716,0,.75,{sk:.139}],914:[.683,0,.759,{sk:.0833}],917:[.68,0,.738,{sk:.0833}],918:[.683,0,.683,{sk:.0833}],919:[.683,0,.831,{ic:.057,sk:.0556}],921:[.683,0,.44,{ic:.064,sk:.111}],922:[.683,0,.849,{sk:.0556}],924:[.683,0,.97,{ic:.081,sk:.0833}],925:[.683,0,.803,{ic:.085,sk:.0833}],927:[.704,.022,.763,{sk:.0833}],929:[.683,0,.642,{ic:.109,sk:.0833}],930:[.704,.022,.763,{sk:.0833}],932:[.677,0,.584,{ic:.12,sk:.0833}],935:[.683,0,.828,{sk:.0833}],978:[.705,0,.583,{ic:.117,sk:.0556}],988:[.68,0,.643,{ic:.106,sk:.0833}]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(213);e.texMathit=n.AddCSS(i.texMathit,{32:{c:" "},33:{c:"!"},35:{c:"#"},37:{c:"%"},38:{c:"&"},40:{c:"("},41:{c:")"},42:{c:"*"},43:{c:"+"},44:{c:","},45:{c:"-"},46:{c:"."},47:{c:"/"},48:{c:"0"},49:{c:"1"},50:{c:"2"},51:{c:"3"},52:{c:"4"},53:{c:"5"},54:{c:"6"},55:{c:"7"},56:{c:"8"},57:{c:"9"},58:{c:":"},59:{c:";"},61:{c:"="},63:{c:"?"},64:{c:"@"},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},91:{c:"["},93:{c:"]"},94:{c:"^"},95:{c:"_"},97:{c:"a"},98:{c:"b"},99:{c:"c"},100:{c:"d"},101:{c:"e"},102:{c:"f"},103:{c:"g"},104:{c:"h"},105:{c:"i"},106:{c:"j"},107:{c:"k"},108:{c:"l"},109:{c:"m"},110:{c:"n"},111:{c:"o"},112:{c:"p"},113:{c:"q"},114:{c:"r"},115:{c:"s"},116:{c:"t"},117:{c:"u"},118:{c:"v"},119:{c:"w"},120:{c:"x"},121:{c:"y"},122:{c:"z"},126:{c:"~"},913:{c:"A"},914:{c:"B"},917:{c:"E"},918:{c:"Z"},919:{c:"H"},921:{c:"I"},922:{c:"K"},924:{c:"M"},925:{c:"N"},927:{c:"O"},929:{c:"P"},930:{c:"\\398"},932:{c:"T"},935:{c:"X"},978:{c:"\\3A5"},988:{c:"F"},8213:{c:"\\2014"},8215:{c:"_"},8260:{c:"/"},8710:{c:"\\394"}})},function(t,e,r){"use 
strict";Object.defineProperty(e,"__esModule",{value:!0}),e.texMathit={32:[0,0,.25],33:[.716,0,.307,{ic:.073}],34:[.694,-.379,.514],35:[.694,.194,.818],37:[.75,.056,.818],38:[.716,.022,.767],39:[.694,-.379,.307,{ic:.07}],40:[.75,.25,.409,{ic:.108}],41:[.75,.25,.409],42:[.75,-.32,.511,{ic:.073}],43:[.557,.057,.767],44:[.121,.194,.307],45:[.251,-.18,.358],46:[.121,0,.307],47:[.75,.25,.511,{ic:.106}],48:[.665,.021,.511,{ic:.051}],49:[.666,0,.511],50:[.666,.022,.511],51:[.666,.022,.511,{ic:.051}],52:[.666,.194,.511],53:[.666,.022,.511,{ic:.056}],54:[.665,.022,.511,{ic:.054}],55:[.666,.022,.511,{ic:.123}],56:[.666,.021,.511],57:[.666,.022,.511],58:[.431,0,.307],59:[.431,.194,.307],61:[.367,-.133,.767],63:[.716,0,.511],64:[.705,.011,.767],65:[.716,0,.743],66:[.683,0,.704],67:[.705,.021,.716,{ic:.096}],68:[.683,0,.755],69:[.68,0,.678,{ic:.065}],70:[.68,0,.653,{ic:.078}],71:[.705,.022,.774],72:[.683,0,.743,{ic:.117}],73:[.683,0,.386,{ic:.122}],74:[.683,.021,.525,{ic:.097}],75:[.683,0,.769,{ic:.09}],76:[.683,0,.627],77:[.683,0,.897,{ic:.113}],78:[.683,0,.743,{ic:.117}],79:[.704,.022,.767],80:[.683,0,.678,{ic:.051}],81:[.704,.194,.767],82:[.683,.022,.729],83:[.705,.022,.562,{ic:.071}],84:[.677,0,.716,{ic:.09}],85:[.683,.022,.743,{ic:.117}],86:[.683,.022,.743,{ic:.125}],87:[.683,.022,.999,{ic:.125}],88:[.683,0,.743,{ic:.082}],89:[.683,0,.743,{ic:.132}],90:[.683,0,.613,{ic:.091}],91:[.75,.25,.307,{ic:.139}],93:[.75,.25,.307,{ic:.052}],94:[.694,-.527,.511],95:[-.025,.062,.511],97:[.442,.011,.511],98:[.694,.011,.46],99:[.441,.01,.46],100:[.694,.011,.511,{ic:.056}],101:[.442,.01,.46],102:[.705,.204,.307,{ic:.143}],103:[.442,.205,.46],104:[.694,.011,.511],105:[.656,.01,.307],106:[.656,.204,.307,{ic:.057}],107:[.694,.011,.46],108:[.694,.011,.256,{ic:.056}],109:[.442,.011,.818],110:[.442,.011,.562],111:[.442,.011,.511],112:[.442,.194,.511],113:[.442,.194,.46],114:[.442,.011,.422,{ic:.062}],115:[.442,.011,.409],116:[.626,.011,.332],117:[.441,.011,.537],118:[.443,.01,.46],119:[.443,.011,.664],120:[.442,.011,.464],121:[.441,.205,.486],122:[.442,.011,.409,{ic:.057}],126:[.318,-.208,.511,{ic:.06}],160:[0,0,.25],163:[.714,.011,.769],305:[.441,.01,.307],567:[.442,.204,.332],768:[.697,-.5,0],769:[.697,-.5,0],770:[.694,-.527,0],771:[.668,-.558,0,{ic:.06}],772:[.589,-.544,0,{ic:.054}],774:[.694,-.515,0,{ic:.062}],775:[.669,-.548,0],776:[.669,-.554,0],778:[.716,-.542,0],779:[.697,-.503,0,{ic:.065}],780:[.638,-.502,0],913:[.716,0,.743],914:[.683,0,.704],915:[.68,0,.627,{ic:.078}],916:[.716,0,.818],917:[.68,0,.678,{ic:.065}],918:[.683,0,.613,{ic:.091}],919:[.683,0,.743,{ic:.117}],920:[.704,.022,.767],921:[.683,0,.386,{ic:.122}],922:[.683,0,.769,{ic:.09}],923:[.716,0,.692],924:[.683,0,.897,{ic:.113}],925:[.683,0,.743,{ic:.117}],926:[.677,0,.664,{ic:.09}],927:[.704,.022,.767],928:[.68,0,.743,{ic:.116}],929:[.683,0,.678,{ic:.051}],930:[.704,.022,.767],931:[.683,0,.716,{ic:.066}],932:[.677,0,.716,{ic:.09}],933:[.705,0,.767,{ic:.065}],934:[.683,0,.716],935:[.683,0,.743,{ic:.082}],936:[.683,0,.767,{ic:.057}],937:[.705,0,.716],978:[.705,0,.767,{ic:.065}],988:[.68,0,.653,{ic:.078}],8211:[.285,-.248,.511],8212:[.285,-.248,1.022],8213:[.285,-.248,1.022],8215:[-.025,.062,.511],8216:[.694,-.379,.307,{ic:.055}],8217:[.694,-.379,.307,{ic:.07}],8220:[.694,-.379,.514,{ic:.092}],8221:[.694,-.379,.514],8260:[.75,.25,.511,{ic:.106}],8463:[.695,.013,.54],8710:[.716,0,.818]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(215);e.texOldstyleBold=n.AddCSS(i.texOldstyleBold,{32:{c:" 
"},48:{c:"0"},49:{c:"1"},50:{c:"2"},51:{c:"3"},52:{c:"4"},53:{c:"5"},54:{c:"6"},55:{c:"7"},56:{c:"8"},57:{c:"9"},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},913:{c:"A",f:"B"},914:{c:"B",f:"B"},917:{c:"E",f:"B"},918:{c:"Z",f:"B"},919:{c:"H",f:"B"},921:{c:"I",f:"B"},922:{c:"K",f:"B"},924:{c:"M",f:"B"},925:{c:"N",f:"B"},927:{c:"O",f:"B"},929:{c:"P",f:"B"},930:{c:"\\398",f:"B"},932:{c:"T",f:"B"},935:{c:"X",f:"B"},978:{c:"\\3A5",f:"B"},988:{c:"F",f:"B"}})},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0}),e.texOldstyleBold={32:[0,0,.25],48:[.46,.017,.575],49:[.461,0,.575],50:[.46,0,.575],51:[.461,.211,.575],52:[.469,.194,.575],53:[.461,.211,.575],54:[.66,.017,.575],55:[.476,.211,.575],56:[.661,.017,.575],57:[.461,.21,.575],65:[.751,.049,.921,{ic:.068,sk:.224}],66:[.705,.017,.748,{sk:.16}],67:[.703,.02,.613,{sk:.16}],68:[.686,0,.892,{sk:.0958}],69:[.703,.016,.607,{sk:.128}],70:[.686,.03,.814,{ic:.116,sk:.128}],71:[.703,.113,.682,{sk:.128}],72:[.686,.048,.987,{sk:.128}],73:[.686,0,.642,{ic:.104,sk:.0319}],74:[.686,.114,.779,{ic:.158,sk:.192}],75:[.703,.017,.871,{sk:.0639}],76:[.703,.017,.788,{sk:.16}],77:[.703,.049,1.378,{sk:.16}],78:[.84,.049,.937,{ic:.168,sk:.0958}],79:[.703,.017,.906,{sk:.128}],80:[.686,.067,.81,{sk:.0958}],81:[.703,.146,.939,{sk:.128}],82:[.686,.017,.99,{sk:.0958}],83:[.703,.016,.696,{sk:.16}],84:[.72,.069,.644,{ic:.303,sk:.0319}],85:[.686,.024,.715,{ic:.056,sk:.0958}],86:[.686,.077,.737,{sk:.0319}],87:[.686,.077,1.169,{sk:.0958}],88:[.686,0,.817,{ic:.089,sk:.16}],89:[.686,.164,.759,{sk:.0958}],90:[.686,0,.818,{sk:.16}],160:[0,0,.25],913:[.698,0,.869],914:[.686,0,.818],917:[.68,0,.756],918:[.686,0,.703],919:[.686,0,.9],921:[.686,0,.436],922:[.686,0,.901],924:[.686,0,1.092],925:[.686,0,.9],927:[.696,.01,.864],929:[.686,0,.786],930:[.696,.01,.894],932:[.675,0,.8],935:[.686,0,.869],978:[.697,0,.894],988:[.68,0,.724]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(217);e.texOldstyle=n.AddCSS(i.texOldstyle,{32:{c:" "},48:{c:"0"},49:{c:"1"},50:{c:"2"},51:{c:"3"},52:{c:"4"},53:{c:"5"},54:{c:"6"},55:{c:"7"},56:{c:"8"},57:{c:"9"},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},913:{c:"A",f:""},914:{c:"B",f:""},917:{c:"E",f:""},918:{c:"Z",f:""},919:{c:"H",f:""},921:{c:"I",f:""},922:{c:"K",f:""},924:{c:"M",f:""},925:{c:"N",f:""},927:{c:"O",f:""},929:{c:"P",f:""},930:{c:"\\398",f:""},932:{c:"T",f:""},935:{c:"X",f:""},978:{c:"\\3A5",f:""},988:{c:"F",f:""}})},function(t,e,r){"use 
strict";Object.defineProperty(e,"__esModule",{value:!0}),e.texOldstyle={32:[0,0,.25],48:[.452,.022,.5],49:[.453,0,.5],50:[.453,0,.5],51:[.452,.216,.5],52:[.464,.194,.5],53:[.453,.216,.5],54:[.665,.022,.5],55:[.463,.216,.5],56:[.666,.021,.5],57:[.453,.216,.5],65:[.728,.05,.798,{sk:.194}],66:[.705,.022,.657,{sk:.139}],67:[.705,.025,.527,{sk:.139}],68:[.683,0,.771,{sk:.0833}],69:[.705,.022,.528,{sk:.111}],70:[.683,.032,.719,{ic:.11,sk:.111}],71:[.704,.119,.595,{sk:.111}],72:[.683,.048,.845,{sk:.111}],73:[.683,0,.545,{ic:.097,sk:.0278}],74:[.683,.119,.678,{ic:.161,sk:.167}],75:[.705,.022,.762,{sk:.0556}],76:[.705,.022,.69,{sk:.139}],77:[.705,.05,1.201,{sk:.139}],78:[.789,.05,.82,{ic:.159,sk:.0833}],79:[.705,.022,.796,{sk:.111}],80:[.683,.057,.696,{sk:.0833}],81:[.705,.131,.817,{sk:.111}],82:[.682,.022,.848,{sk:.0833}],83:[.705,.022,.606,{sk:.139}],84:[.717,.068,.545,{ic:.288,sk:.0278}],85:[.683,.028,.626,{ic:.061,sk:.0833}],86:[.683,.052,.613,{sk:.0278}],87:[.683,.053,.988,{sk:.0833}],88:[.683,0,.713,{ic:.094,sk:.139}],89:[.683,.143,.668,{sk:.0833}],90:[.683,0,.725,{sk:.139}],160:[0,0,.25],913:[.716,0,.75],914:[.683,0,.708],917:[.68,0,.681],918:[.683,0,.611],919:[.683,0,.75],921:[.683,0,.361],922:[.683,0,.778],924:[.683,0,.917],925:[.683,0,.75],927:[.705,.022,.778],929:[.683,0,.681],930:[.705,.022,.778],932:[.677,0,.722],935:[.683,0,.75],978:[.705,0,.778],988:[.68,0,.653]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(219);e.texSize3=n.AddCSS(i.texSize3,{32:{c:" "},40:{c:"("},41:{c:")"},47:{c:"/"},91:{c:"["},93:{c:"]"},123:{c:"{"},125:{c:"}"},8260:{c:"/"},9001:{c:"\\27E8"},9002:{c:"\\27E9"},12296:{c:"\\27E8"},12297:{c:"\\27E9"}})},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0}),e.texSize3={32:[0,0,.25],40:[1.45,.949,.736],41:[1.45,.949,.736],47:[1.45,.949,1.044],91:[1.45,.949,.528],92:[1.45,.949,1.044],93:[1.45,.949,.528],123:[1.45,.949,.75],125:[1.45,.949,.75],160:[0,0,.25],710:[.772,-.564,1.444],732:[.749,-.61,1.444],770:[.772,-.564,0],771:[.749,-.61,0],8260:[1.45,.949,1.044],8730:[1.45,.95,1],8968:[1.45,.949,.583],8969:[1.45,.949,.583],8970:[1.45,.949,.583],8971:[1.45,.949,.583],9001:[1.45,.95,.75],9002:[1.45,.949,.75],10216:[1.45,.95,.75],10217:[1.45,.949,.75],12296:[1.45,.95,.75],12297:[1.45,.949,.75]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(221);e.texSize4=n.AddCSS(i.texSize4,{32:{c:" "},40:{c:"("},41:{c:")"},47:{c:"/"},91:{c:"["},93:{c:"]"},123:{c:"{"},125:{c:"}"},8260:{c:"/"},9001:{c:"\\27E8"},9002:{c:"\\27E9"},12296:{c:"\\27E8"},12297:{c:"\\27E9"},57685:{c:"\\E153\\E152"},57686:{c:"\\E151\\E150"}})},function(t,e,r){"use 
strict";Object.defineProperty(e,"__esModule",{value:!0}),e.texSize4={32:[0,0,.25],40:[1.75,1.249,.792],41:[1.75,1.249,.792],47:[1.75,1.249,1.278],91:[1.75,1.249,.583],92:[1.75,1.249,1.278],93:[1.75,1.249,.583],123:[1.75,1.249,.806],125:[1.75,1.249,.806],160:[0,0,.25],710:[.845,-.561,1.889],732:[.823,-.583,1.889],770:[.845,-.561,0],771:[.823,-.583,0],8260:[1.75,1.249,1.278],8730:[1.75,1.25,1],8968:[1.75,1.249,.639],8969:[1.75,1.249,.639],8970:[1.75,1.249,.639],8971:[1.75,1.249,.639],9001:[1.75,1.248,.806],9002:[1.75,1.248,.806],9115:[1.154,.655,.875],9116:[.61,.01,.875],9117:[1.165,.644,.875],9118:[1.154,.655,.875],9119:[.61,.01,.875],9120:[1.165,.644,.875],9121:[1.154,.645,.667],9122:[.602,0,.667],9123:[1.155,.644,.667],9124:[1.154,.645,.667],9125:[.602,0,.667],9126:[1.155,.644,.667],9127:[.899,.01,.889],9128:[1.16,.66,.889],9129:[.01,.899,.889],9130:[.29,.015,.889],9131:[.899,.01,.889],9132:[1.16,.66,.889],9133:[.01,.899,.889],9143:[.935,.885,1.056],10216:[1.75,1.248,.806],10217:[1.75,1.248,.806],12296:[1.75,1.248,.806],12297:[1.75,1.248,.806],57344:[.625,.014,1.056],57345:[.605,.014,1.056],57680:[.12,.213,.45],57681:[.12,.213,.45],57682:[.333,0,.45],57683:[.333,0,.45],57684:[.32,.2,.4],57685:[.333,0,.9],57686:[.12,.213,.9]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(1),i=r(223);e.texVariant=n.AddCSS(i.texVariant,{32:{c:" "},65:{c:"A"},66:{c:"B"},67:{c:"C"},68:{c:"D"},69:{c:"E"},70:{c:"F"},71:{c:"G"},72:{c:"H"},73:{c:"I"},74:{c:"J"},75:{c:"K"},76:{c:"L"},77:{c:"M"},78:{c:"N"},79:{c:"O"},80:{c:"P"},81:{c:"Q"},82:{c:"R"},83:{c:"S"},84:{c:"T"},85:{c:"U"},86:{c:"V"},87:{c:"W"},88:{c:"X"},89:{c:"Y"},90:{c:"Z"},107:{c:"k"},988:{c:"\\E008"},1008:{c:"\\E009"},8463:{f:""},8726:{f:""},8740:{c:"\\E006"},8742:{c:"\\E007"},8808:{c:"\\E00C"},8809:{c:"\\E00D"},8816:{c:"\\E011"},8817:{c:"\\E00E"},8840:{c:"\\E016"},8841:{c:"\\E018"},8842:{c:"\\E01A"},8843:{c:"\\E01B"},10887:{c:"\\E010"},10888:{c:"\\E00F"},10955:{c:"\\E017"},10956:{c:"\\E019"}})},function(t,e,r){"use 
strict";Object.defineProperty(e,"__esModule",{value:!0}),e.texVariant={32:[0,0,.25],65:[.701,0,.722],66:[.683,0,.667],67:[.702,.019,.722],68:[.683,0,.722],69:[.683,0,.667],70:[.683,0,.611],71:[.702,.019,.778],72:[.683,0,.778],73:[.683,0,.389],74:[.683,.077,.5],75:[.683,0,.778],76:[.683,0,.667],77:[.683,0,.944],78:[.683,.02,.722],79:[.701,.019,.778],80:[.683,0,.611],81:[.701,.181,.778],82:[.683,0,.722],83:[.702,.012,.556],84:[.683,0,.667],85:[.683,.019,.722],86:[.683,.02,.722],87:[.683,.019,1],88:[.683,0,.722],89:[.683,0,.722],90:[.683,0,.667],107:[.683,0,.556],160:[0,0,.25],165:[.683,0,.75],174:[.709,.175,.947],240:[.749,.021,.556],295:[.695,.013,.54],710:[.845,-.561,2.333],732:[.899,-.628,2.333],770:[.845,-.561,0],771:[.899,-.628,0],988:[.605,.085,.778],989:[.605,.085,.778],1008:[.434,.006,.667,{ic:.067}],8245:[.56,-.043,.275],8463:[.695,.013,.54],8487:[.684,.022,.722],8498:[.695,0,.556],8502:[.763,.021,.667],8503:[.764,.043,.444],8504:[.764,.043,.667],8513:[.705,.023,.639],8592:[.437,-.064,.5],8594:[.437,-.064,.5],8602:[.437,-.06,1],8603:[.437,-.06,1],8606:[.417,-.083,1],8608:[.417,-.083,1],8610:[.417,-.083,1.111],8611:[.417,-.083,1.111],8619:[.575,.041,1],8620:[.575,.041,1],8621:[.417,-.083,1.389],8622:[.437,-.06,1],8624:[.722,0,.5],8625:[.722,0,.5],8630:[.461,0,1],8631:[.46,0,1],8634:[.65,.083,.778],8635:[.65,.083,.778],8638:[.694,.194,.417],8639:[.694,.194,.417],8642:[.694,.194,.417],8643:[.694,.194,.417],8644:[.667,0,1],8646:[.667,0,1],8647:[.583,.083,1],8648:[.694,.193,.833],8649:[.583,.083,1],8650:[.694,.194,.833],8651:[.514,.014,1],8652:[.514,.014,1],8653:[.534,.035,1],8654:[.534,.037,1],8655:[.534,.035,1],8666:[.611,.111,1],8667:[.611,.111,1],8669:[.417,-.083,1],8672:[.437,-.064,1.334],8674:[.437,-.064,1.334],8705:[.846,.021,.5],8708:[.86,.166,.556],8709:[.587,0,.778],8717:[.44,0,.429],8722:[.27,-.23,.5],8724:[.766,.093,.778],8726:[.43,.023,.778],8733:[.472,-.028,.778],8736:[.694,0,.722],8737:[.714,.02,.722],8738:[.551,.051,.722],8739:[.43,.023,.222],8740:[.43,.023,.222],8741:[.431,.023,.389],8742:[.431,.024,.389],8756:[.471,.082,.667],8757:[.471,.082,.667],8764:[.365,-.132,.778],8765:[.367,-.133,.778],8769:[.467,-.032,.778],8770:[.463,-.034,.778],8774:[.652,.155,.778],8776:[.481,-.05,.778],8778:[.579,.039,.778],8782:[.492,-.008,.778],8783:[.492,-.133,.778],8785:[.609,.108,.778],8786:[.601,.101,.778],8787:[.601,.102,.778],8790:[.367,-.133,.778],8791:[.721,-.133,.778],8796:[.859,-.133,.778],8806:[.753,.175,.778],8807:[.753,.175,.778],8808:[.752,.284,.778],8809:[.752,.284,.778],8812:[.75,.25,.5],8814:[.708,.209,.778],8815:[.708,.209,.778],8816:[.919,.421,.778],8817:[.919,.421,.778],8818:[.732,.228,.778],8819:[.732,.228,.778],8822:[.681,.253,.778],8823:[.681,.253,.778],8828:[.58,.153,.778],8829:[.58,.154,.778],8830:[.732,.228,.778],8831:[.732,.228,.778],8832:[.705,.208,.778],8833:[.705,.208,.778],8840:[.828,.33,.778],8841:[.828,.33,.778],8842:[.634,.255,.778],8843:[.634,.254,.778],8847:[.539,.041,.778],8848:[.539,.041,.778],8858:[.582,.082,.778],8859:[.582,.082,.778],8861:[.582,.082,.778],8862:[.689,0,.778],8863:[.689,0,.778],8864:[.689,0,.778],8865:[.689,0,.778],8872:[.694,0,.611],8873:[.694,0,.722],8874:[.694,0,.889],8876:[.695,0,.611],8877:[.695,0,.611],8878:[.695,0,.722],8879:[.695,0,.722],8882:[.539,.041,.778],8883:[.539,.041,.778],8884:[.636,.138,.778],8885:[.636,.138,.778],8888:[.408,-.092,1.111],8890:[.431,.212,.556],8891:[.716,0,.611],8892:[.716,0,.611],8901:[.189,0,.278],8903:[.545,.044,.778],8905:[.492,-.008,.778],8906:[.492,-.008,.778],8907:[.694,.022,.778],8908:[.694,.02
2,.778],8909:[.464,-.036,.778],8910:[.578,.021,.76],8911:[.578,.022,.76],8912:[.54,.04,.778],8913:[.54,.04,.778],8914:[.598,.022,.667],8915:[.598,.022,.667],8916:[.736,.022,.667],8918:[.541,.041,.778],8919:[.541,.041,.778],8920:[.568,.067,1.333],8921:[.568,.067,1.333],8922:[.886,.386,.778],8923:[.886,.386,.778],8926:[.734,0,.778],8927:[.734,0,.778],8928:[.801,.303,.778],8929:[.801,.303,.778],8934:[.73,.359,.778],8935:[.73,.359,.778],8936:[.73,.359,.778],8937:[.73,.359,.778],8938:[.706,.208,.778],8939:[.706,.208,.778],8940:[.802,.303,.778],8941:[.801,.303,.778],8994:[.378,-.122,.778],8995:[.378,-.143,.778],9416:[.709,.175,.902],9484:[.694,-.306,.5],9488:[.694,-.306,.5],9492:[.366,.022,.5],9496:[.366,.022,.5],9585:[.694,.195,.889],9586:[.694,.195,.889],9632:[.689,0,.778],9633:[.689,0,.778],9650:[.575,.02,.722],9651:[.575,.02,.722],9654:[.539,.041,.778],9660:[.576,.019,.722],9661:[.576,.019,.722],9664:[.539,.041,.778],9674:[.716,.132,.667],9733:[.694,.111,.944],10003:[.706,.034,.833],10016:[.716,.022,.833],10731:[.716,.132,.667],10846:[.813,.097,.611],10877:[.636,.138,.778],10878:[.636,.138,.778],10885:[.762,.29,.778],10886:[.762,.29,.778],10887:[.801,.303,.778],10888:[.801,.303,.778],10889:[.761,.387,.778],10890:[.761,.387,.778],10891:[1.003,.463,.778],10892:[1.003,.463,.778],10901:[.636,.138,.778],10902:[.636,.138,.778],10933:[.752,.286,.778],10934:[.752,.286,.778],10935:[.761,.294,.778],10936:[.761,.294,.778],10937:[.761,.337,.778],10938:[.761,.337,.778],10949:[.753,.215,.778],10950:[.753,.215,.778],10955:[.752,.332,.778],10956:[.752,.333,.778],57350:[.43,.023,.222],57351:[.431,.024,.389],57352:[.605,.085,.778],57353:[.434,.006,.667,{ic:.067}],57356:[.752,.284,.778],57357:[.752,.284,.778],57358:[.919,.421,.778],57359:[.801,.303,.778],57360:[.801,.303,.778],57361:[.919,.421,.778],57366:[.828,.33,.778],57367:[.752,.332,.778],57368:[.828,.33,.778],57369:[.752,.333,.778],57370:[.634,.255,.778],57371:[.634,.254,.778]}},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(23);e.HDW1=[.75,.25,.875],e.HDW2=[.583,.082,1],e.HDW3=[.583,.082,.5],e.VSIZES=[1,1.2,1.8,2.4,3];var 
i={c:47,dir:n.V,sizes:e.VSIZES},o={c:175,dir:n.H,sizes:[.59],stretch:[0,175],HDW:[.59,-.544,.5]},a={c:710,dir:n.H,sizes:[.517,.817,1.335,1.777,1.909]},s={c:732,dir:n.H,sizes:[.583,.805,1.33,1.773,1.887]},c={c:8211,dir:n.H,sizes:[.5],stretch:[0,8211],HDW:[.285,-.248,.5]},l={c:8592,dir:n.H,sizes:[1],stretch:[8592,8722],HDW:e.HDW2},u={c:8594,dir:n.H,sizes:[1],stretch:[0,8722,8594],HDW:e.HDW2},h={c:8596,dir:n.H,sizes:[1],stretch:[8592,8722,8594],HDW:e.HDW2},f={c:8612,dir:n.H,stretch:[8592,8722,8739],HDW:e.HDW3,min:1.278},p={c:8614,dir:n.H,sizes:[1],stretch:[8739,8722,8594],HDW:e.HDW2},d={c:8656,dir:n.H,sizes:[1],stretch:[8656,61],HDW:e.HDW2},m={c:8658,dir:n.H,sizes:[1],stretch:[0,61,8658],HDW:e.HDW2},y={c:8660,dir:n.H,sizes:[1],stretch:[8656,61,8658],HDW:e.HDW2},v={c:8722,dir:n.H,sizes:[.778],stretch:[0,8722],HDW:[.583,.082,.778]},b={c:8739,dir:n.V,sizes:[1],stretch:[0,8739],HDW:[.75,.249,.278]},g={c:9180,dir:n.H,sizes:[.778,1],schar:[8994,8994],stretch:[57680,57684,57681],HDW:[.32,.2,.5]},M={c:9181,dir:n.H,sizes:[.778,1],schar:[8995,8995],stretch:[57682,57684,57683],HDW:[.32,.2,.5]},O={c:9182,dir:n.H,stretch:[57680,57684,57681,57685],HDW:[.32,.2,.5],min:1.8},x={c:9183,dir:n.H,stretch:[57682,57684,57683,57686],HDW:[.32,.2,.5],min:1.8},S={c:10216,dir:n.V,sizes:e.VSIZES},E={c:10217,dir:n.V,sizes:e.VSIZES},C={c:10502,dir:n.H,stretch:[8656,61,8739],HDW:e.HDW3,min:1.278},_={c:10503,dir:n.H,stretch:[8872,61,8658],HDW:e.HDW3,min:1.278};e.delimiters={40:{dir:n.V,sizes:e.VSIZES,stretch:[9115,9116,9117],HDW:[.75,.25,.875]},41:{dir:n.V,sizes:e.VSIZES,stretch:[9118,9119,9120],HDW:[.75,.25,.875]},45:v,47:i,61:{dir:n.H,sizes:[.767],stretch:[0,61],HDW:[.583,.082,.778]},91:{dir:n.V,sizes:e.VSIZES,stretch:[9121,9122,9123],HDW:e.HDW1},92:{dir:n.V,sizes:e.VSIZES},93:{dir:n.V,sizes:e.VSIZES,stretch:[9124,9125,9126],HDW:e.HDW1},94:a,95:c,123:{dir:n.V,sizes:e.VSIZES,stretch:[9127,9130,9129,9128],HDW:[.75,.25,.889]},124:{dir:n.V,sizes:[1],stretch:[0,8739],HDW:[.75,.249,.278]},125:{dir:n.V,sizes:e.VSIZES,stretch:[9131,9130,9133,9132],HDW:[.75,.25,.889]},126:s,175:o,710:a,713:o,732:s,770:a,771:s,818:c,8211:c,8212:c,8213:c,8214:{dir:n.V,sizes:[.602,1],schar:[0,8741],stretch:[0,8741],HDW:[.75,.25,.5]},8215:c,8254:o,8407:u,8592:l,8593:{dir:n.V,sizes:[.888],stretch:[8593,9168],HDW:[.694,.193,.667]},8594:u,8595:{dir:n.V,sizes:[.888],stretch:[0,9168,8595],HDW:[.694,.194,.667]},8596:h,8597:{dir:n.V,sizes:[1.044],stretch:[8593,9168,8595],HDW:[.772,.272,.667]},8606:{dir:n.H,sizes:[1],stretch:[8606,8722],HDW:e.HDW2},8608:{dir:n.H,sizes:[1],stretch:[0,8722,8608],HDW:e.HDW2},8612:f,8613:{dir:n.V,stretch:[8593,9168,8869],HDW:e.HDW1,min:1.555},8614:p,8615:{dir:n.V,stretch:[8868,9168,8595],HDW:e.HDW1,min:1.555},8624:{dir:n.V,sizes:[.722],stretch:[8624,9168],HDW:[.722,0,.667]},8625:{dir:n.V,sizes:[.722],stretch:[8625,9168],HDW:[.722,0,.667]},8636:{dir:n.H,sizes:[1],stretch:[8636,8722],HDW:e.HDW2},8637:{dir:n.H,sizes:[1],stretch:[8637,8722],HDW:e.HDW2},8638:{dir:n.V,sizes:[.888],stretch:[8638,9168],HDW:[.694,.194,.667]},8639:{dir:n.V,sizes:[.888],stretch:[8639,9168],HDW:[.694,.194,.667]},8640:{dir:n.H,sizes:[1],stretch:[0,8722,8640],HDW:e.HDW2},8641:{dir:n.H,sizes:[1],stretch:[0,8722,8641],HDW:e.HDW2},8642:{dir:n.V,sizes:[.888],stretch:[0,9168,8642],HDW:[.694,.194,.667]},8643:{dir:n.V,sizes:[.888],stretch:[0,9168,8643],HDW:[.694,.194,.667]},8656:d,8657:{dir:n.V,sizes:[.888],stretch:[8657,8214],HDW:[.694,.194,.778]},8658:m,8659:{dir:n.V,sizes:[.888],stretch:[0,8214,8659],HDW:[.694,.194,.778]},8660:y,8661:{dir:n.V,sizes:[1.044],stretch:[
8657,8214,8659],HDW:[.772,.272,.778]},8666:{dir:n.H,sizes:[1],stretch:[8666,8801],HDW:[.464,-.036,1]},8667:{dir:n.H,sizes:[1],stretch:[0,8801,8667],HDW:[.464,-.036,1]},8722:v,8725:i,8730:{dir:n.V,sizes:e.VSIZES,stretch:[57345,57344,9143],HDW:[.8,.2,1.056]},8739:b,8741:{dir:n.V,sizes:[1],stretch:[0,8741],HDW:[.75,.25,.5]},8968:{dir:n.V,sizes:e.VSIZES,stretch:[9121,9122],HDW:e.HDW1},8969:{dir:n.V,sizes:e.VSIZES,stretch:[9124,9125],HDW:e.HDW1},8970:{dir:n.V,sizes:e.VSIZES,stretch:[0,9122,9123],HDW:e.HDW1},8971:{dir:n.V,sizes:e.VSIZES,stretch:[0,9125,9126],HDW:e.HDW1},8978:g,8994:g,8995:M,9001:S,9002:E,9130:{dir:n.V,sizes:[.32],stretch:[9130,9130,9130],HDW:[.29,.015,.889]},9135:c,9136:{dir:n.V,sizes:[.989],stretch:[9127,9130,9133],HDW:[.744,.244,.889]},9137:{dir:n.V,sizes:[.989],stretch:[9131,9130,9129],HDW:[.744,.244,.889]},9140:{dir:n.H,stretch:[9484,8722,9488],HDW:e.HDW3,min:1},9141:{dir:n.H,stretch:[9492,8722,9496],HDW:e.HDW3,min:1},9168:{dir:n.V,sizes:[.602,1],schar:[0,8739],stretch:[0,8739],HDW:[.602,0,.278]},9180:g,9181:M,9182:O,9183:x,9184:{dir:n.H,stretch:[714,713,715],HDW:[.59,-.544,.5],min:1},9185:{dir:n.H,stretch:[715,713,714],HDW:[.59,-.544,.5],min:1},9472:c,10072:b,10216:S,10217:E,10222:{dir:n.V,sizes:[.989],stretch:[9127,9130,9129],HDW:[.744,.244,.889]},10223:{dir:n.V,sizes:[.989],stretch:[9131,9130,9133],HDW:[.744,.244,.889]},10229:l,10230:u,10231:h,10232:d,10233:m,10234:y,10235:f,10236:p,10237:C,10238:_,10502:C,10503:_,10574:{dir:n.H,stretch:[8636,8722,8640],HDW:e.HDW3,min:2},10575:{dir:n.V,stretch:[8638,9168,8642],HDW:e.HDW1,min:1.776},10576:{dir:n.H,stretch:[8637,8722,8641],HDW:e.HDW3,min:2},10577:{dir:n.V,stretch:[8639,9168,8643],HDW:e.HDW1,min:.5},10586:{dir:n.H,stretch:[8636,8722,8739],HDW:e.HDW3,min:1.278},10587:{dir:n.H,stretch:[8739,8722,8640],HDW:e.HDW3,min:1.278},10588:{dir:n.V,stretch:[8638,9168,8869],HDW:e.HDW1,min:1.556},10589:{dir:n.V,stretch:[8868,9168,8642],HDW:e.HDW1,min:1.556},10590:{dir:n.H,stretch:[8637,8722,8739],HDW:e.HDW3,min:1.278},10591:{dir:n.H,stretch:[8739,8722,8641],HDW:e.HDW3,min:1.278},10592:{dir:n.V,stretch:[8639,9168,8869],HDW:e.HDW1,min:1.776},10593:{dir:n.V,stretch:[8868,9168,8643],HDW:e.HDW1,min:1.776},12296:S,12297:E,65079:O,65080:x}},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),l=this&&this.__values||function(t){var e="function"==typeof Symbol&&Symbol.iterator,r=e&&t[e],n=0;if(r)return r.call(t);if(t&&"number"==typeof t.length)return{next:function(){return t&&n>=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")},u=this&&this.__read||function(t,e){var r="function"==typeof Symbol&&t[Symbol.iterator];if(!r)return t;var n,i,o=r.call(t),a=[];try{for(;(void 0===e||0=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var o=r(20),l=r(13),n=r(3),i=r(225),a=r(227),s=r(228),c="undefined"!=typeof window&&window.navigator&&"Mac"===window.navigator.platform.substr(0,3),h=(Object.defineProperty(f.prototype,"isLoading",{get:function(){return 0/g,">")},f.prototype.toMML=function(t){return 
this.MmlVisitor.visitTree(t.root,t,{texHints:this.settings.texHints,semantics:this.settings.semantics&&"MathML"!==t.inputJax.name})},f.prototype.zoom=function(t,e,r){t&&!this.isZoomEvent(t,e)||(this.menu.mathItem=r,t&&this.menu.post(t),this.zoomBox.post())},f.prototype.isZoomEvent=function(t,e){return this.settings.zoom===e&&(!this.settings.alt||t.altKey)&&(!this.settings.ctrl||t.ctrlKey)&&(!this.settings.cmd||t.metaKey)&&(!this.settings.shift||t.shiftKey)},f.prototype.rerender=function(t){void 0===t&&(t=l.STATE.TYPESET),this.rerenderStart=Math.min(t,this.rerenderStart),f.loading||(this.document.rerender(this.rerenderStart),this.rerenderStart=l.STATE.LAST)},f.prototype.copyMathML=function(){this.copyToClipboard(this.toMML(this.menu.mathItem))},f.prototype.copyOriginal=function(){this.copyToClipboard(this.menu.mathItem.math)},f.prototype.copyAnnotation=function(){this.copyToClipboard(this.menu.annotation)},f.prototype.copyToClipboard=function(t){var e=document.createElement("textarea");e.value=t,e.setAttribute("readonly",""),e.style.cssText="height: 1px; width: 1px; padding: 1px; position: absolute; left: -10px",document.body.appendChild(e),e.select();try{document.execCommand("copy")}catch(t){alert("Can't copy to clipboard: "+t.message)}document.body.removeChild(e)},f.prototype.addMenu=function(e){var r=this,t=e.typesetRoot;t.addEventListener("contextmenu",function(){return r.menu.mathItem=e},!0),t.addEventListener("keydown",function(){return r.menu.mathItem=e},!0),t.addEventListener("click",function(t){return r.zoom(t,"Click",e)},!0),t.addEventListener("dblclick",function(t){return r.zoom(t,"DoubleClick",e)},!0),this.menu.getStore().insert(t)},f.prototype.clear=function(){this.menu.getStore().clear()},f.prototype.variable=function(e,r){var n=this;return{name:e,getter:function(){return n.settings[e]},setter:function(t){n.settings[e]=t,r&&r(t),n.saveUserSettings()}}},f.prototype.a11yVar=function(r){var n=this;return{name:r,getter:function(){return n.getA11y(r)},setter:function(t){n.settings[r]=t;var e={};e[r]=t,n.setA11y(e),n.saveUserSettings()}}},f.prototype.submenu=function(t,e,r,n){var i,o;void 0===r&&(r=[]),void 0===n&&(n=!1);var a=[];try{for(var s=u(r),c=s.next();!c.done;c=s.next()){var l=c.value;Array.isArray(l)?a=a.concat(l):a.push(l)}}catch(t){i={error:t}}finally{try{c&&!c.done&&(o=s.return)&&o.call(s)}finally{if(i)throw i.error}}return{type:"submenu",id:t,content:e,menu:{items:a},disabled:0===a.length||n}},f.prototype.command=function(t,e,r,n){return void 0===n&&(n={}),Object.assign({type:"command",id:t,content:e,action:r},n)},f.prototype.checkbox=function(t,e,r,n){return void 0===n&&(n={}),Object.assign({type:"checkbox",id:t,content:e,variable:r},n)},f.prototype.radioGroup=function(e,t){var r=this;return t.map(function(t){return r.radio(t[0],t[1]||t[0],e)})},f.prototype.radio=function(t,e,r,n){return void 0===n&&(n={}),Object.assign({type:"radio",id:t,content:e,variable:r},n)},f.prototype.label=function(t,e){return{type:"label",id:t,content:e}},f.prototype.rule=function(){return{type:"rule"}},f.MENU_STORAGE="MathJax-Menu-Settings",f.OPTIONS={settings:{texHints:!0,semantics:!1,zoom:"NoZoom",zscale:"200%",renderer:"CHTML",alt:!1,cmd:!1,ctrl:!1,shift:!1,scale:1,autocollapse:!1,collapsible:!1,inTabOrder:!0,explorer:!1},jax:{CHTML:null,SVG:null},annotationTypes:n.expandable({TeX:["TeX","LaTeX","application/x-tex"],StarMath:["StarMath 5.0"],Maple:["Maple"],ContentMathML:["MathML-Content","application/mathml-content+xml"],OpenMath:["OpenMath"]})},f.loading=0,f.loadingPromises=new 
Map,f._loadingPromise=null,f._loadingOK=null,f._loadingFailed=null,f);function f(t,e){var r=this;void 0===e&&(e={}),this.settings=null,this.defaultSettings=null,this.menu=null,this.MmlVisitor=new a.MmlVisitor,this.jax={CHTML:null,SVG:null},this.rerenderStart=l.STATE.LAST,this.about=new ContextMenu.Info('MathJax v'+o.mathjax.version,function(){var t=[];return t.push("Input Jax: "+r.document.inputJax.map(function(t){return t.name}).join(", ")),t.push("Output Jax: "+r.document.outputJax.name),t.push("Document Type: "+r.document.kind),t.join("
")},'www.mathjax.org'),this.help=new ContextMenu.Info("MathJax Help",function(){return["

MathJax is a JavaScript library that allows page"," authors to include mathematics within their web pages."," As a reader, you don't need to do anything to make that happen.

","

Browsers: MathJax works with all modern browsers including"," Edge, Firefox, Chrome, Safari, Opera, and most mobile browsers.

","

Math Menu: MathJax adds a contextual menu to equations."," Right-click or CTRL-click on any mathematics to access the menu.

",'
',"

Show Math As: These options allow you to view the formula's"," source markup (as MathML or in its original format).

","

Copy to Clipboard: These options copy the formula's source markup,"," as MathML or in its original format, to the clipboard"," (in browsers that support that).

","

Math Settings: These give you control over features of MathJax,"," such the size of the mathematics, and the mechanism used"," to display equations.

","

Accessibility: MathJax can work with screen"," readers to make mathematics accessible to the visually impaired."," Turn on the explorer to enable generation of speech strings"," and the ability to investigate expressions interactively.

","

Language: This menu lets you select the language used by MathJax"," for its menus and warning messages. (Not yet implemented in version 3.)

","
","

Math Zoom: If you are having difficulty reading an"," equation, MathJax can enlarge it to help you see it better, or"," you can scall all the math on the page to make it larger."," Turn these features on in the Math Settings menu.

","

Preferences: MathJax uses your browser's localStorage database"," to save the preferences set via this menu locally in your browser. These"," are not used to track you, and are not transferred or used remotely by"," MathJax in any way.

"].join("\n")},'www.mathjax.org'),this.mathmlCode=new s.SelectableInfo("MathJax MathML Expression",function(){if(!r.menu.mathItem)return"";var t=r.toMML(r.menu.mathItem);return"
"+r.formatSource(t)+"
"},""),this.originalText=new s.SelectableInfo("MathJax Original Source",function(){if(!r.menu.mathItem)return"";var t=r.menu.mathItem.math;return'
'+r.formatSource(t)+"
"},""),this.annotationText=new s.SelectableInfo("MathJax Annotation Text",function(){if(!r.menu.mathItem)return"";var t=r.menu.annotation;return'
'+r.formatSource(t)+"
"},""),this.zoomBox=new ContextMenu.Info("MathJax Zoomed Expression",function(){if(!r.menu.mathItem)return"";var t=r.menu.mathItem.typesetRoot.cloneNode(!0);return t.style.margin="0",'
'+t.outerHTML+"
"},""),this.document=t,this.options=n.userOptions(n.defaultOptions({},this.constructor.OPTIONS),e),this.initSettings(),this.mergeUserSettings(),this.initMenu()}e.Menu=h},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)});Object.defineProperty(e,"__esModule",{value:!0});var o,a=r(96),s=r(3),c=(o=a.SerializedMmlVisitor,i(l,o),l.prototype.visitTree=function(t,e,r){return void 0===e&&(e=null),void 0===r&&(r={}),this.mathItem=e,s.userOptions(this.options,r),this.visitNode(t,"")},l.prototype.visitTeXAtomNode=function(t,e){return this.options.texHints?o.prototype.visitTeXAtomNode.call(this,t,e):t.childNodes[0]&&1===t.childNodes[0].childNodes.length?this.visitNode(t.childNodes[0],e):e+"\n"+this.childNodeMml(t,e+" ","\n")+e+""},l.prototype.visitMathNode=function(t,e){if(!this.options.semantics||"TeX"!==this.mathItem.inputJax.name)return o.prototype.visitDefault.call(this,t,e);var r=t.childNodes.length&&1\n"+e+" \n"+(r?e+" \n":"")+this.childNodeMml(t,e+(r?" ":" "),"\n")+(r?e+" \n":"")+e+' '+this.mathItem.math+"\n"+e+" \n"+e+""},l);function l(){var t=null!==o&&o.apply(this,arguments)||this;return t.options={texHints:!0,semantics:!1},t.mathItem=null,t}e.MmlVisitor=c},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)});Object.defineProperty(e,"__esModule",{value:!0});var o,a=(o=ContextMenu.Info,i(s,o),s.prototype.addEvents=function(t){var e=this;t.addEventListener("keypress",function(t){"a"===t.key&&(t.ctrlKey||t.metaKey)&&(e.selectAll(),e.stop(t))})},s.prototype.selectAll=function(){document.getSelection().selectAllChildren(this.getHtml().querySelector("pre"))},s.prototype.copyToClipboard=function(){this.selectAll();try{document.execCommand("copy")}catch(t){alert("Can't copy to clipboard: "+t.message)}document.getSelection().removeAllRanges()},s.prototype.generateHtml=function(){var e=this;o.prototype.generateHtml.call(this);var t=this.getHtml().querySelector("span."+ContextMenu.HtmlClasses.INFOSIGNATURE).appendChild(document.createElement("input"));t.type="button",t.value="Copy to Clipboard",t.addEventListener("click",function(t){return e.copyToClipboard()})},s);function s(){return null!==o&&o.apply(this,arguments)||this}e.SelectableInfo=a},function(t,e,r){"use strict";var n,o=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),a=this&&this.__assign||function(){return(a=Object.assign||function(t){for(var e,r=1,n=arguments.length;r=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var l=r(20),u=r(13),h=r(3),f=r(226);function p(t){return 
o(e,n=t),e.prototype.addMenu=function(t){this.state()=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(S,"__esModule",{value:!0});var t,c,r,o,l,n=E(5),a=E(24);function u(t){return r.visitTree(t,c.document)}function h(){r=new S.MathJax._.core.MmlTree.SerializedMmlVisitor.SerializedMmlVisitor,o=S.MathJax._.mathjax.mathjax,c.input=v(),c.output=b(),c.adaptor=g(),c.handler&&o.handlers.unregister(c.handler),c.handler=M(),c.handler&&(o.handlers.register(c.handler),c.document=O())}function f(){var e,t;c.input&&c.output&&p();var r=c.output?c.output.name.toLowerCase():"";try{for(var n=s(c.input),i=n.next();!i.done;i=n.next()){var o=i.value,a=o.name.toLowerCase();m(a,o),y(a,o),c.output&&d(a,r,o)}}catch(t){e={error:t}}finally{try{i&&!i.done&&(t=n.return)&&t.call(n)}finally{if(e)throw e.error}}}function p(){S.MathJax.typeset=function(t){void 0===t&&(t=null),c.document.options.elements=t,c.document.render()},S.MathJax.typesetPromise=function(t){return void 0===t&&(t=null),c.document.options.elements=t,o.handleRetriesFor(function(){c.document.render()})},S.MathJax.typesetClear=function(){return c.document.clear()}}function d(t,e,r){var n=t+"2"+e;S.MathJax[n]=function(t,e){return void 0===e&&(e={}),e.format=r.name,c.document.convert(t,e)},S.MathJax[n+"Promise"]=function(t,e){return void 0===e&&(e={}),e.format=r.name,o.handleRetriesFor(function(){return c.document.convert(t,e)})},S.MathJax[e+"Stylesheet"]=function(){return c.output.styleSheet(c.document)},"getMetricsFor"in c.output&&(S.MathJax.getMetricsFor=function(t,e){return c.output.getMetricsFor(t,e)})}function m(t,r){var n=S.MathJax._.core.MathItem.STATE;S.MathJax[t+"2mml"]=function(t,e){return void 0===e&&(e={}),e.end=n.CONVERT,e.format=r.name,u(c.document.convert(t,e))},S.MathJax[t+"2mmlPromise"]=function(t,e){return void 0===e&&(e={}),e.end=n.CONVERT,e.format=r.name,o.handleRetriesFor(function(){return u(c.document.convert(t,e))})}}function y(t,e){"tex"===t&&(S.MathJax.texReset=function(t){return void 0===t&&(t=0),e.parseOptions.tags.reset(t)})}function v(){var e,t,r=[];try{for(var n=s(S.CONFIG.input),i=n.next();!i.done;i=n.next()){var o=i.value,a=c.constructors[o];if(!a)throw Error('Input Jax "'+o+'" is not defined (has it been loaded?)');r.push(new a(S.MathJax.config[o]))}}catch(t){e={error:t}}finally{try{i&&!i.done&&(t=n.return)&&t.call(n)}finally{if(e)throw e.error}}return r}function b(){var t=S.CONFIG.output;if(!t)return null;var e=c.constructors[t];if(!e)throw Error('Output Jax "'+t+'" is not defined (has it been loaded?)');return new e(S.MathJax.config[t])}function g(){var t=S.CONFIG.adaptor;if(!t||"none"===t)return null;var e=c.constructors[t];if(!e)throw Error('DOMAdaptor "'+t+'" is not defined (has it been loaded?)');return e(S.MathJax.config[t])}function M(){var e,t,r=S.CONFIG.handler;if(!r||"none"===r||!c.adaptor)return null;var n=c.constructors[r];if(!n)throw Error('Handler "'+r+'" is not defined (has it been loaded?)');var i=new n(c.adaptor,5);try{for(var o=s(l),a=o.next();!a.done;a=o.next()){i=a.value.item(i)}}catch(t){e={error:t}}finally{try{a&&!a.done&&(t=o.return)&&t.call(o)}finally{if(e)throw e.error}}return i}function O(t){return void 0===t&&(t=null),o.document(t||S.CONFIG.document,e(e({},S.MathJax.config.options),{InputJax:c.input,OutputJax:c.output}))}c=t=S.Startup||(S.Startup={}),l=new a.PrioritizedList,c.constructors={},c.input=[],c.output=null,c.handler=null,c.adaptor=null,c.elements=null,c.document=null,c.promise=new 
Promise(function(t,e){var r=i.document;if(r&&r.readyState&&"complete"!==r.readyState&&"interactive"!==r.readyState){var n=function(){return t()};r.defaultView.addEventListener("load",n,!0),r.defaultView.addEventListener("DOMContentLoaded",n,!0)}else t()}),c.toMML=u,c.registerConstructor=function(t,e){c.constructors[t]=e},c.useHandler=function(t,e){void 0===e&&(e=!1),S.CONFIG.handler&&!e||(S.CONFIG.handler=t)},c.useAdaptor=function(t,e){void 0===e&&(e=!1),S.CONFIG.adaptor&&!e||(S.CONFIG.adaptor=t)},c.useInput=function(t,e){void 0===e&&(e=!1),x&&!e||S.CONFIG.input.push(t)},c.useOutput=function(t,e){void 0===e&&(e=!1),S.CONFIG.output&&!e||(S.CONFIG.output=t)},c.extendHandler=function(t,e){void 0===e&&(e=10),l.add(t,e)},c.defaultReady=function(){h(),f(),c.promise=c.promise.then(function(){return S.CONFIG.pageReady()})},c.defaultPageReady=function(){return S.CONFIG.typeset&&S.MathJax.typesetPromise?S.MathJax.typesetPromise():null},c.getComponents=h,c.makeMethods=f,c.makeTypesetMethods=p,c.makeOutputMethods=d,c.makeMmlMethods=m,c.makeResetMethod=y,c.getInputJax=v,c.getOutputJax=b,c.getAdaptor=g,c.getHandler=M,c.getDocument=O,S.MathJax=n.MathJax,void 0===S.MathJax._.startup&&(n.combineDefaults(S.MathJax.config,"startup",{input:[],output:"",handler:null,adaptor:null,document:"undefined"==typeof document?"":document,elements:null,typeset:!0,ready:t.defaultReady.bind(t),pageReady:t.defaultPageReady.bind(t)}),n.combineWithMathJax({startup:t,options:{}})),S.CONFIG=S.MathJax.config.startup;var x=0!==S.CONFIG.input.length}).call(this,E(28))},function(t,e,r){"use strict";r(17).Loader.preLoad("loader","startup","core","input/tex","input/mml","output/chtml","output/chtml/fonts/tex.js","ui/menu")},function(t,e,r){"use strict";r(234);var n=r(70),i=r(81);MathJax.startup&&(MathJax.startup.registerConstructor("HTMLHandler",n.HTMLHandler),MathJax.startup.registerConstructor("browserAdaptor",i.browserAdaptor),MathJax.startup.useHandler("HTMLHandler"),MathJax.startup.useAdaptor("browserAdaptor")),MathJax.loader&&(MathJax._.mathjax.mathjax.asyncLoad=function(t){return MathJax.loader.load(t)})},function(t,e,r){"use strict";var n=r(5),i=Ct(n),o=Ct(r(79)),a=Ct(r(81)),s=Ct(r(80)),c=Ct(r(40)),l=Ct(r(82)),u=Ct(r(94)),h=Ct(r(29)),f=Ct(r(41)),p=Ct(r(13)),d=Ct(r(43)),m=Ct(r(19)),y=Ct(r(85)),v=Ct(r(235)),b=Ct(r(44)),g=Ct(r(0)),M=Ct(r(67)),O=Ct(r(59)),x=Ct(r(90)),S=Ct(r(91)),E=Ct(r(46)),C=Ct(r(92)),_=Ct(r(58)),T=Ct(r(88)),w=Ct(r(57)),A=Ct(r(53)),k=Ct(r(65)),I=Ct(r(47)),L=Ct(r(61)),N=Ct(r(48)),P=Ct(r(26)),B=Ct(r(56)),R=Ct(r(89)),j=Ct(r(55)),H=Ct(r(52)),D=Ct(r(51)),X=Ct(r(50)),F=Ct(r(54)),W=Ct(r(87)),J=Ct(r(31)),q=Ct(r(62)),V=Ct(r(64)),U=Ct(r(49)),z=Ct(r(63)),G=Ct(r(60)),K=Ct(r(66)),Z=Ct(r(68)),Y=Ct(r(86)),$=Ct(r(96)),Q=Ct(r(42)),tt=Ct(r(30)),et=Ct(r(45)),rt=Ct(r(84)),nt=Ct(r(95)),it=Ct(r(97)),ot=Ct(r(98)),at=Ct(r(236)),st=Ct(r(99)),ct=Ct(r(102)),lt=Ct(r(70)),ut=Ct(r(100)),ht=Ct(r(101)),ft=Ct(r(20)),pt=Ct(r(103)),dt=Ct(r(93)),mt=Ct(r(12)),yt=Ct(r(25)),vt=Ct(r(83)),bt=Ct(r(3)),gt=Ct(r(24)),Mt=Ct(r(69)),Ot=Ct(r(71)),xt=Ct(r(14)),St=Ct(r(104)),Et=Ct(r(10));function Ct(t){if(t&&t.__esModule)return t;var e={};if(null!=t)for(var r in t)Object.prototype.hasOwnProperty.call(t,r)&&(e[r]=t[r]);return 
e.default=t,e}(0,n.combineWithMathJax)({_:{adaptors:{HTMLAdaptor:o,browserAdaptor:a},components:{global:i},core:{DOMAdaptor:s,FindMath:c,Handler:l,HandlerList:u,InputJax:h,MathDocument:f,MathItem:p,MathList:d,MmlTree:{Attributes:m,MML:y,MathMLVisitor:v,MmlFactory:b,MmlNode:g,MmlNodes:{TeXAtom:M,maction:O,maligngroup:x,malignmark:S,math:E,mathchoice:C,menclose:_,merror:T,mfenced:w,mfrac:A,mglyph:k,mi:I,mmultiscripts:L,mn:N,mo:P,mpadded:B,mphantom:R,mroot:j,mrow:H,ms:D,mspace:X,msqrt:F,mstyle:W,msubsup:J,mtable:q,mtd:V,mtext:U,mtr:z,munderover:G,semantics:K},MmlVisitor:Z,OperatorDictionary:Y,SerializedMmlVisitor:$},OutputJax:Q,Tree:{Factory:tt,Node:et,NodeFactory:rt,Visitor:nt,Wrapper:it,WrapperFactory:ot}},handlers:{html_ts:at,html:{HTMLDocument:st,HTMLDomStrings:ct,HTMLHandler:lt,HTMLMathItem:ut,HTMLMathList:ht}},mathjax:ft,util:{AsyncLoad:pt,BitField:dt,Entities:mt,FunctionList:yt,LinkedList:vt,Options:bt,PrioritizedList:gt,Retries:Mt,Styles:Ot,lengths:xt,numeric:St,string:Et}}})},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),l=this&&this.__values||function(t){var e="function"==typeof Symbol&&Symbol.iterator,r=e&&t[e],n=0;if(r)return r.call(t);if(t&&"number"==typeof t.length)return{next:function(){return t&&n>=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var o,a=r(68),s=(o=a.MmlVisitor,i(c,o),c.prototype.visitTree=function(t,e){var r=(this.document=e).createElement("top");return this.visitNode(t,r),this.document=null,r.firstChild},c.prototype.visitTextNode=function(t,e){e.appendChild(this.document.createTextNode(t.getText()))},c.prototype.visitXMLNode=function(t,e){e.appendChild(t.getXML().cloneNode(!0))},c.prototype.visitInferredMrowNode=function(t,e){var r,n;try{for(var i=l(t.childNodes),o=i.next();!o.done;o=i.next()){var a=o.value;this.visitNode(a,e)}}catch(t){r={error:t}}finally{try{o&&!o.done&&(n=i.return)&&n.call(i)}finally{if(r)throw r.error}}},c.prototype.visitDefault=function(t,e){var r,n,i=this.document.createElement(t.kind);this.addAttributes(t,i);try{for(var o=l(t.childNodes),a=o.next();!a.done;a=o.next()){var s=a.value;this.visitNode(s,i)}}catch(t){r={error:t}}finally{try{a&&!a.done&&(n=o.return)&&n.call(o)}finally{if(r)throw r.error}}e.appendChild(i)},c.prototype.addAttributes=function(t,e){var r,n,i=t.attributes,o=i.getExplicitNames();try{for(var a=l(o),s=a.next();!s.done;s=a.next()){var c=s.value;e.setAttribute(c,i.getExplicit(c).toString())}}catch(t){r={error:t}}finally{try{s&&!s.done&&(n=a.return)&&n.call(a)}finally{if(r)throw r.error}}},c);function c(){var t=null!==o&&o.apply(this,arguments)||this;return t.document=null,t}e.MathMLVisitor=s},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(20),i=r(70);e.RegisterHTMLHandler=function(t){var e=new i.HTMLHandler(t);return n.mathjax.handlers.register(e),e}},function(t,e,r){"use strict";r(238);var 
n=r(249);r(17).Loader.preLoad("input/tex-base","[tex]/ams","[tex]/newcommand","[tex]/noundefined","[tex]/require","[tex]/autoload","[tex]/configMacros"),(0,n.registerTeX)(["base","ams","newcommand","noundefined","require","autoload","configMacros"])},function(t,e,r){"use strict";var n=r(5),i=j(r(105)),o=j(r(11)),a=j(r(107)),s=j(r(106)),c=j(r(8)),l=j(r(112)),u=j(r(6)),h=j(r(33)),f=j(r(110)),p=j(r(7)),d=j(r(109)),m=j(r(32)),y=j(r(111)),v=j(r(22)),b=j(r(9)),g=j(r(27)),M=j(r(15)),O=j(r(4)),x=j(r(21)),S=j(r(242)),E=j(r(114)),C=j(r(115)),_=j(r(244)),T=j(r(113)),w=j(r(34)),A=j(r(35)),k=j(r(245)),I=j(r(246)),L=j(r(118)),N=j(r(72)),P=j(r(117)),B=j(r(248)),R=j(r(116));function j(t){if(t&&t.__esModule)return t;var e={};if(null!=t)for(var r in t)Object.prototype.hasOwnProperty.call(t,r)&&(e[r]=t[r]);return e.default=t,e}(0,n.combineWithMathJax)({_:{input:{tex_ts:i,tex:{Configuration:o,FilterUtil:a,FindTeX:s,MapHandler:c,NodeFactory:l,NodeUtil:u,ParseMethods:h,ParseOptions:f,ParseUtil:p,Stack:d,StackItem:m,StackItemFactory:y,Symbol:v,SymbolMap:b,Tags:g,TexConstants:M,TexError:O,TexParser:x,ams:{AmsConfiguration:S,AmsItems:E,AmsMethods:C},autoload:{AutoloadConfiguration:_},base:{BaseConfiguration:T,BaseItems:w,BaseMethods:A},config_macros:{ConfigMacrosConfiguration:k},newcommand:{NewcommandConfiguration:I,NewcommandItems:L,NewcommandMethods:N,NewcommandUtil:P},noundefined:{NoUndefinedConfiguration:B},require:{RequireConfiguration:R}}}}})},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(9),i=r(15),o=r(35),a=r(33),s=r(0);new n.RegExpMap("letter",a.default.variable,/[a-z]/i),new n.RegExpMap("digit",a.default.digit,/[0-9.,]/),new n.RegExpMap("command",a.default.controlSequence,/^\\/),new n.MacroMap("special",{"{":"Open","}":"Close","~":"Tilde","^":"Superscript",_:"Subscript"," ":"Space","\t":"Space","\r":"Space","\n":"Space","'":"Prime","%":"Comment","&":"Entry","#":"Hash","\xa0":"Space","\u2019":"Prime"},o.default),new 
n.CharacterMap("mathchar0mi",a.default.mathchar0mi,{alpha:"\u03b1",beta:"\u03b2",gamma:"\u03b3",delta:"\u03b4",epsilon:"\u03f5",zeta:"\u03b6",eta:"\u03b7",theta:"\u03b8",iota:"\u03b9",kappa:"\u03ba",lambda:"\u03bb",mu:"\u03bc",nu:"\u03bd",xi:"\u03be",omicron:"\u03bf",pi:"\u03c0",rho:"\u03c1",sigma:"\u03c3",tau:"\u03c4",upsilon:"\u03c5",phi:"\u03d5",chi:"\u03c7",psi:"\u03c8",omega:"\u03c9",varepsilon:"\u03b5",vartheta:"\u03d1",varpi:"\u03d6",varrho:"\u03f1",varsigma:"\u03c2",varphi:"\u03c6",S:["\xa7",{mathvariant:i.TexConstant.Variant.NORMAL}],aleph:["\u2135",{mathvariant:i.TexConstant.Variant.NORMAL}],hbar:["\u210f",{variantForm:!0}],imath:"\u0131",jmath:"\u0237",ell:"\u2113",wp:["\u2118",{mathvariant:i.TexConstant.Variant.NORMAL}],Re:["\u211c",{mathvariant:i.TexConstant.Variant.NORMAL}],Im:["\u2111",{mathvariant:i.TexConstant.Variant.NORMAL}],partial:["\u2202",{mathvariant:i.TexConstant.Variant.NORMAL}],infty:["\u221e",{mathvariant:i.TexConstant.Variant.NORMAL}],prime:["\u2032",{mathvariant:i.TexConstant.Variant.NORMAL,variantForm:!0}],emptyset:["\u2205",{mathvariant:i.TexConstant.Variant.NORMAL}],nabla:["\u2207",{mathvariant:i.TexConstant.Variant.NORMAL}],top:["\u22a4",{mathvariant:i.TexConstant.Variant.NORMAL}],bot:["\u22a5",{mathvariant:i.TexConstant.Variant.NORMAL}],angle:["\u2220",{mathvariant:i.TexConstant.Variant.NORMAL}],triangle:["\u25b3",{mathvariant:i.TexConstant.Variant.NORMAL}],backslash:["\u2216",{mathvariant:i.TexConstant.Variant.NORMAL,variantForm:!0}],forall:["\u2200",{mathvariant:i.TexConstant.Variant.NORMAL}],exists:["\u2203",{mathvariant:i.TexConstant.Variant.NORMAL}],neg:["\xac",{mathvariant:i.TexConstant.Variant.NORMAL}],lnot:["\xac",{mathvariant:i.TexConstant.Variant.NORMAL}],flat:["\u266d",{mathvariant:i.TexConstant.Variant.NORMAL}],natural:["\u266e",{mathvariant:i.TexConstant.Variant.NORMAL}],sharp:["\u266f",{mathvariant:i.TexConstant.Variant.NORMAL}],clubsuit:["\u2663",{mathvariant:i.TexConstant.Variant.NORMAL}],diamondsuit:["\u2662",{mathvariant:i.TexConstant.Variant.NORMAL}],heartsuit:["\u2661",{mathvariant:i.TexConstant.Variant.NORMAL}],spadesuit:["\u2660",{mathvariant:i.TexConstant.Variant.NORMAL}]}),new 
n.CharacterMap("mathchar0mo",a.default.mathchar0mo,{surd:"\u221a",coprod:["\u2210",{texClass:s.TEXCLASS.OP,movesupsub:!0}],bigvee:["\u22c1",{texClass:s.TEXCLASS.OP,movesupsub:!0}],bigwedge:["\u22c0",{texClass:s.TEXCLASS.OP,movesupsub:!0}],biguplus:["\u2a04",{texClass:s.TEXCLASS.OP,movesupsub:!0}],bigcap:["\u22c2",{texClass:s.TEXCLASS.OP,movesupsub:!0}],bigcup:["\u22c3",{texClass:s.TEXCLASS.OP,movesupsub:!0}],int:["\u222b",{texClass:s.TEXCLASS.OP}],intop:["\u222b",{texClass:s.TEXCLASS.OP,movesupsub:!0,movablelimits:!0}],iint:["\u222c",{texClass:s.TEXCLASS.OP}],iiint:["\u222d",{texClass:s.TEXCLASS.OP}],prod:["\u220f",{texClass:s.TEXCLASS.OP,movesupsub:!0}],sum:["\u2211",{texClass:s.TEXCLASS.OP,movesupsub:!0}],bigotimes:["\u2a02",{texClass:s.TEXCLASS.OP,movesupsub:!0}],bigoplus:["\u2a01",{texClass:s.TEXCLASS.OP,movesupsub:!0}],bigodot:["\u2a00",{texClass:s.TEXCLASS.OP,movesupsub:!0}],oint:["\u222e",{texClass:s.TEXCLASS.OP}],bigsqcup:["\u2a06",{texClass:s.TEXCLASS.OP,movesupsub:!0}],smallint:["\u222b",{largeop:!1}],triangleleft:"\u25c3",triangleright:"\u25b9",bigtriangleup:"\u25b3",bigtriangledown:"\u25bd",wedge:"\u2227",land:"\u2227",vee:"\u2228",lor:"\u2228",cap:"\u2229",cup:"\u222a",ddagger:"\u2021",dagger:"\u2020",sqcap:"\u2293",sqcup:"\u2294",uplus:"\u228e",amalg:"\u2a3f",diamond:"\u22c4",bullet:"\u2219",wr:"\u2240",div:"\xf7",odot:["\u2299",{largeop:!1}],oslash:["\u2298",{largeop:!1}],otimes:["\u2297",{largeop:!1}],ominus:["\u2296",{largeop:!1}],oplus:["\u2295",{largeop:!1}],mp:"\u2213",pm:"\xb1",circ:"\u2218",bigcirc:"\u25ef",setminus:["\u2216",{variantForm:!0}],cdot:"\u22c5",ast:"\u2217",times:"\xd7",star:"\u22c6",propto:"\u221d",sqsubseteq:"\u2291",sqsupseteq:"\u2292",parallel:"\u2225",mid:"\u2223",dashv:"\u22a3",vdash:"\u22a2",leq:"\u2264",le:"\u2264",geq:"\u2265",ge:"\u2265",lt:"<",gt:">",succ:"\u227b",prec:"\u227a",approx:"\u2248",succeq:"\u2ab0",preceq:"\u2aaf",supset:"\u2283",subset:"\u2282",supseteq:"\u2287",subseteq:"\u2286",in:"\u2208",ni:"\u220b",notin:"\u2209",owns:"\u220b",gg:"\u226b",ll:"\u226a",sim:"\u223c",simeq:"\u2243",perp:"\u22a5",equiv:"\u2261",asymp:"\u224d",smile:"\u2323",frown:"\u2322",ne:"\u2260",neq:"\u2260",cong:"\u2245",doteq:"\u2250",bowtie:"\u22c8",models:"\u22a8",notChar:"\u29f8",Leftrightarrow:"\u21d4",Leftarrow:"\u21d0",Rightarrow:"\u21d2",leftrightarrow:"\u2194",leftarrow:"\u2190",gets:"\u2190",rightarrow:"\u2192",to:"\u2192",mapsto:"\u21a6",leftharpoonup:"\u21bc",leftharpoondown:"\u21bd",rightharpoonup:"\u21c0",rightharpoondown:"\u21c1",nearrow:"\u2197",searrow:"\u2198",nwarrow:"\u2196",swarrow:"\u2199",rightleftharpoons:"\u21cc",hookrightarrow:"\u21aa",hookleftarrow:"\u21a9",longleftarrow:"\u27f5",Longleftarrow:"\u27f8",longrightarrow:"\u27f6",Longrightarrow:"\u27f9",Longleftrightarrow:"\u27fa",longleftrightarrow:"\u27f7",longmapsto:"\u27fc",ldots:"\u2026",cdots:"\u22ef",vdots:"\u22ee",ddots:"\u22f1",dotsc:"\u2026",dotsb:"\u22ef",dotsm:"\u22ef",dotsi:"\u22ef",dotso:"\u2026",ldotp:[".",{texClass:s.TEXCLASS.PUNCT}],cdotp:["\u22c5",{texClass:s.TEXCLASS.PUNCT}],colon:[":",{texClass:s.TEXCLASS.PUNCT}]}),new n.CharacterMap("mathchar7",a.default.mathchar7,{Gamma:"\u0393",Delta:"\u0394",Theta:"\u0398",Lambda:"\u039b",Xi:"\u039e",Pi:"\u03a0",Sigma:"\u03a3",Upsilon:"\u03a5",Phi:"\u03a6",Psi:"\u03a8",Omega:"\u03a9",_:"_","#":"#",$:"$","%":"%","&":"&",And:"&"}),new 
n.DelimiterMap("delimiter",a.default.delimiter,{"(":"(",")":")","[":"[","]":"]","<":"\u27e8",">":"\u27e9","\\lt":"\u27e8","\\gt":"\u27e9","/":"/","|":["|",{texClass:s.TEXCLASS.ORD}],".":"","\\\\":"\\","\\lmoustache":"\u23b0","\\rmoustache":"\u23b1","\\lgroup":"\u27ee","\\rgroup":"\u27ef","\\arrowvert":"\u23d0","\\Arrowvert":"\u2016","\\bracevert":"\u23aa","\\Vert":["\u2225",{texClass:s.TEXCLASS.ORD}],"\\|":["\u2225",{texClass:s.TEXCLASS.ORD}],"\\vert":["|",{texClass:s.TEXCLASS.ORD}],"\\uparrow":"\u2191","\\downarrow":"\u2193","\\updownarrow":"\u2195","\\Uparrow":"\u21d1","\\Downarrow":"\u21d3","\\Updownarrow":"\u21d5","\\backslash":"\\","\\rangle":"\u27e9","\\langle":"\u27e8","\\rbrace":"}","\\lbrace":"{","\\}":"}","\\{":"{","\\rceil":"\u2309","\\lceil":"\u2308","\\rfloor":"\u230b","\\lfloor":"\u230a","\\lbrack":"[","\\rbrack":"]"}),new n.CommandMap("macros",{displaystyle:["SetStyle","D",!0,0],textstyle:["SetStyle","T",!1,0],scriptstyle:["SetStyle","S",!1,1],scriptscriptstyle:["SetStyle","SS",!1,2],rm:["SetFont",i.TexConstant.Variant.NORMAL],mit:["SetFont",i.TexConstant.Variant.ITALIC],oldstyle:["SetFont",i.TexConstant.Variant.OLDSTYLE],cal:["SetFont",i.TexConstant.Variant.CALLIGRAPHIC],it:["SetFont","-tex-mathit"],bf:["SetFont",i.TexConstant.Variant.BOLD],bbFont:["SetFont",i.TexConstant.Variant.DOUBLESTRUCK],scr:["SetFont",i.TexConstant.Variant.SCRIPT],frak:["SetFont",i.TexConstant.Variant.FRAKTUR],sf:["SetFont",i.TexConstant.Variant.SANSSERIF],tt:["SetFont",i.TexConstant.Variant.MONOSPACE],tiny:["SetSize",.5],Tiny:["SetSize",.6],scriptsize:["SetSize",.7],small:["SetSize",.85],normalsize:["SetSize",1],large:["SetSize",1.2],Large:["SetSize",1.44],LARGE:["SetSize",1.73],huge:["SetSize",2.07],Huge:["SetSize",2.49],arcsin:["NamedFn"],arccos:["NamedFn"],arctan:["NamedFn"],arg:["NamedFn"],cos:["NamedFn"],cosh:["NamedFn"],cot:["NamedFn"],coth:["NamedFn"],csc:["NamedFn"],deg:["NamedFn"],det:"NamedOp",dim:["NamedFn"],exp:["NamedFn"],gcd:"NamedOp",hom:["NamedFn"],inf:"NamedOp",ker:["NamedFn"],lg:["NamedFn"],lim:"NamedOp",liminf:["NamedOp","lim inf"],limsup:["NamedOp","lim 
sup"],ln:["NamedFn"],log:["NamedFn"],max:"NamedOp",min:"NamedOp",Pr:"NamedOp",sec:["NamedFn"],sin:["NamedFn"],sinh:["NamedFn"],sup:"NamedOp",tan:["NamedFn"],tanh:["NamedFn"],limits:["Limits",1],nolimits:["Limits",0],overline:["UnderOver","00AF",null,1],underline:["UnderOver","005F"],overbrace:["UnderOver","23DE",1],underbrace:["UnderOver","23DF",1],overparen:["UnderOver","23DC"],underparen:["UnderOver","23DD"],overrightarrow:["UnderOver","2192"],underrightarrow:["UnderOver","2192"],overleftarrow:["UnderOver","2190"],underleftarrow:["UnderOver","2190"],overleftrightarrow:["UnderOver","2194"],underleftrightarrow:["UnderOver","2194"],overset:"Overset",underset:"Underset",stackrel:["Macro","\\mathrel{\\mathop{#2}\\limits^{#1}}",2],over:"Over",overwithdelims:"Over",atop:"Over",atopwithdelims:"Over",above:"Over",abovewithdelims:"Over",brace:["Over","{","}"],brack:["Over","[","]"],choose:["Over","(",")"],frac:"Frac",sqrt:"Sqrt",root:"Root",uproot:["MoveRoot","upRoot"],leftroot:["MoveRoot","leftRoot"],left:"LeftRight",right:"LeftRight",middle:"Middle",llap:"Lap",rlap:"Lap",raise:"RaiseLower",lower:"RaiseLower",moveleft:"MoveLeftRight",moveright:"MoveLeftRight",",":["Spacer",i.TexConstant.Length.THINMATHSPACE],":":["Spacer",i.TexConstant.Length.MEDIUMMATHSPACE],">":["Spacer",i.TexConstant.Length.MEDIUMMATHSPACE],";":["Spacer",i.TexConstant.Length.THICKMATHSPACE],"!":["Spacer",i.TexConstant.Length.NEGATIVETHINMATHSPACE],enspace:["Spacer",".5em"],quad:["Spacer","1em"],qquad:["Spacer","2em"],thinspace:["Spacer",i.TexConstant.Length.THINMATHSPACE],negthinspace:["Spacer",i.TexConstant.Length.NEGATIVETHINMATHSPACE],hskip:"Hskip",hspace:"Hskip",kern:"Hskip",mskip:"Hskip",mspace:"Hskip",mkern:"Hskip",rule:"rule",Rule:["Rule"],Space:["Rule","blank"],big:["MakeBig",s.TEXCLASS.ORD,.85],Big:["MakeBig",s.TEXCLASS.ORD,1.15],bigg:["MakeBig",s.TEXCLASS.ORD,1.45],Bigg:["MakeBig",s.TEXCLASS.ORD,1.75],bigl:["MakeBig",s.TEXCLASS.OPEN,.85],Bigl:["MakeBig",s.TEXCLASS.OPEN,1.15],biggl:["MakeBig",s.TEXCLASS.OPEN,1.45],Biggl:["MakeBig",s.TEXCLASS.OPEN,1.75],bigr:["MakeBig",s.TEXCLASS.CLOSE,.85],Bigr:["MakeBig",s.TEXCLASS.CLOSE,1.15],biggr:["MakeBig",s.TEXCLASS.CLOSE,1.45],Biggr:["MakeBig",s.TEXCLASS.CLOSE,1.75],bigm:["MakeBig",s.TEXCLASS.REL,.85],Bigm:["MakeBig",s.TEXCLASS.REL,1.15],biggm:["MakeBig",s.TEXCLASS.REL,1.45],Biggm:["MakeBig",s.TEXCLASS.REL,1.75],mathord:["TeXAtom",s.TEXCLASS.ORD],mathop:["TeXAtom",s.TEXCLASS.OP],mathopen:["TeXAtom",s.TEXCLASS.OPEN],mathclose:["TeXAtom",s.TEXCLASS.CLOSE],mathbin:["TeXAtom",s.TEXCLASS.BIN],mathrel:["TeXAtom",s.TEXCLASS.REL],mathpunct:["TeXAtom",s.TEXCLASS.PUNCT],mathinner:["TeXAtom",s.TEXCLASS.INNER],vcenter:["TeXAtom",s.TEXCLASS.VCENTER],buildrel:"BuildRel",hbox:["HBox",0],text:"HBox",mbox:["HBox",0],fbox:"FBox",strut:"Strut",mathstrut:["Macro","\\vphantom{(}"],phantom:"Phantom",vphantom:["Phantom",1,0],hphantom:["Phantom",0,1],smash:"Smash",acute:["Accent","00B4"],grave:["Accent","0060"],ddot:["Accent","00A8"],tilde:["Accent","007E"],bar:["Accent","00AF"],breve:["Accent","02D8"],check:["Accent","02C7"],hat:["Accent","005E"],vec:["Accent","2192"],dot:["Accent","02D9"],widetilde:["Accent","007E",1],widehat:["Accent","005E",1],matrix:"Matrix",array:"Matrix",pmatrix:["Matrix","(",")"],cases:["Matrix","{","","left left",null,".1em",null,!0],eqalign:["Matrix",null,null,"right 
left",i.TexConstant.Length.THICKMATHSPACE,".5em","D"],displaylines:["Matrix",null,null,"center",null,".5em","D"],cr:"Cr","\\":"CrLaTeX",newline:"Cr",hline:["HLine","solid"],hdashline:["HLine","dashed"],eqalignno:["Matrix",null,null,"right left",i.TexConstant.Length.THICKMATHSPACE,".5em","D",null,"right"],leqalignno:["Matrix",null,null,"right left",i.TexConstant.Length.THICKMATHSPACE,".5em","D",null,"left"],hfill:"HFill",hfil:"HFill",hfilll:"HFill",bmod:["Macro",'\\mmlToken{mo}[lspace="thickmathspace" rspace="thickmathspace"]{mod}'],pmod:["Macro","\\pod{\\mmlToken{mi}{mod}\\kern 6mu #1}",1],mod:["Macro","\\mathchoice{\\kern18mu}{\\kern12mu}{\\kern12mu}{\\kern12mu}\\mmlToken{mi}{mod}\\,\\,#1",1],pod:["Macro","\\mathchoice{\\kern18mu}{\\kern8mu}{\\kern8mu}{\\kern8mu}(#1)",1],iff:["Macro","\\;\\Longleftrightarrow\\;"],skew:["Macro","{{#2{#3\\mkern#1mu}\\mkern-#1mu}{}}",3],mathcal:["Macro","{\\cal #1}",1],mathscr:["Macro","{\\scr #1}",1],mathrm:["Macro","{\\rm #1}",1],mathbf:["Macro","{\\bf #1}",1],mathbb:["Macro","{\\bbFont #1}",1],Bbb:["Macro","{\\bbFont #1}",1],mathit:["Macro","{\\it #1}",1],mathfrak:["Macro","{\\frak #1}",1],mathsf:["Macro","{\\sf #1}",1],mathtt:["Macro","{\\tt #1}",1],textrm:["Macro","\\mathord{\\rm\\text{#1}}",1],textit:["Macro","\\mathord{\\it\\text{#1}}",1],textbf:["Macro","\\mathord{\\bf\\text{#1}}",1],textsf:["Macro","\\mathord{\\sf\\text{#1}}",1],texttt:["Macro","\\mathord{\\tt\\text{#1}}",1],pmb:["Macro","\\rlap{#1}\\kern1px{#1}",1],TeX:["Macro","T\\kern-.14em\\lower.5ex{E}\\kern-.115em X"],LaTeX:["Macro","L\\kern-.325em\\raise.21em{\\scriptstyle{A}}\\kern-.17em\\TeX"]," ":["Macro","\\text{ }"],not:"Not",dots:"Dots",space:"Tilde","\xa0":"Tilde",begin:"BeginEnd",end:"BeginEnd",label:"HandleLabel",ref:"HandleRef",nonumber:"HandleNoTag",mathchoice:"MathChoice",mmlToken:"MmlToken"},o.default);new n.EnvironmentMap("environment",a.default.environment,{array:["AlignedArray"],equation:["Equation",null,!0],"equation*":["Equation",null,!1],eqnarray:["EqnArray",null,!0,!0,"rcl","0 "+i.TexConstant.Length.THICKMATHSPACE,".5em"]},o.default);new n.CharacterMap("not_remap",null,{"\u2190":"\u219a","\u2192":"\u219b","\u2194":"\u21ae","\u21d0":"\u21cd","\u21d2":"\u21cf","\u21d4":"\u21ce","\u2208":"\u2209","\u220b":"\u220c","\u2223":"\u2224","\u2225":"\u2226","\u223c":"\u2241","~":"\u2241","\u2243":"\u2244","\u2245":"\u2247","\u2248":"\u2249","\u224d":"\u226d","=":"\u2260","\u2261":"\u2262","<":"\u226e",">":"\u226f","\u2264":"\u2270","\u2265":"\u2271","\u2272":"\u2274","\u2273":"\u2275","\u2276":"\u2278","\u2277":"\u2279","\u227a":"\u2280","\u227b":"\u2281","\u2282":"\u2284","\u2283":"\u2285","\u2286":"\u2288","\u2287":"\u2289","\u22a2":"\u22ac","\u22a8":"\u22ad","\u22a9":"\u22ae","\u22ab":"\u22af","\u227c":"\u22e0","\u227d":"\u22e1","\u2291":"\u22e2","\u2292":"\u22e3","\u22b2":"\u22ea","\u22b3":"\u22eb","\u22b4":"\u22ec","\u22b5":"\u22ed","\u2203":"\u2204"})},function(t,e,r){"use 
strict";Object.defineProperty(e,"__esModule",{value:!0}),r(12).add({Pcy:"\u041f",Poincareplane:"\u210c",Pr:"\u2abb",Prime:"\u2033",Proportion:"\u2237",par:"\u2225",para:"\xb6",parallel:"\u2225",parsim:"\u2af3",parsl:"\u2afd",part:"\u2202",pcy:"\u043f",percnt:"%",permil:"\u2030",perp:"\u22a5",pertenk:"\u2031",phmmat:"\u2133",phone:"\u260e",pitchfork:"\u22d4",planck:"\u210f",planckh:"\u210e",plankv:"\u210f",plus:"+",plusacir:"\u2a23",plusb:"\u229e",pluscir:"\u2a22",plusdo:"\u2214",plusdu:"\u2a25",pluse:"\u2a72",plusmn:"\xb1",plussim:"\u2a26",plustwo:"\u2a27",pm:"\xb1",pointint:"\u2a15",pound:"\xa3",pr:"\u227a",prE:"\u2ab3",prcue:"\u227c",pre:"\u2aaf",prec:"\u227a",precapprox:"\u2ab7",preccurlyeq:"\u227c",preceq:"\u2aaf",precsim:"\u227e",primes:"\u2119",prnE:"\u2ab5",prnap:"\u2ab9",prnsim:"\u22e8",prod:"\u220f",profalar:"\u232e",profline:"\u2312",profsurf:"\u2313",prop:"\u221d",propto:"\u221d",prsim:"\u227e",prurel:"\u22b0",puncsp:"\u2008"},"p")},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0}),r(12).add({RBarr:"\u2910",REG:"\xae",Racute:"\u0154",Rang:"\u27eb",Rarrtl:"\u2916",Rcaron:"\u0158",Rcedil:"\u0156",Rcy:"\u0420",ReverseElement:"\u220b",ReverseUpEquilibrium:"\u296f",Rho:"\u03a1",RightArrowBar:"\u21e5",RightDoubleBracket:"\u27e7",RightDownTeeVector:"\u295d",RightDownVectorBar:"\u2955",RightTeeVector:"\u295b",RightTriangleBar:"\u29d0",RightUpDownVector:"\u294f",RightUpTeeVector:"\u295c",RightUpVectorBar:"\u2954",RightVectorBar:"\u2953",RoundImplies:"\u2970",RuleDelayed:"\u29f4",rAarr:"\u21db",rArr:"\u21d2",rAtail:"\u291c",rBarr:"\u290f",rHar:"\u2964",race:"\u223d\u0331",racute:"\u0155",radic:"\u221a",raemptyv:"\u29b3",rang:"\u27e9",rangd:"\u2992",range:"\u29a5",rangle:"\u27e9",raquo:"\xbb",rarr:"\u2192",rarrap:"\u2975",rarrb:"\u21e5",rarrbfs:"\u2920",rarrc:"\u2933",rarrfs:"\u291e",rarrhk:"\u21aa",rarrlp:"\u21ac",rarrpl:"\u2945",rarrsim:"\u2974",rarrw:"\u219d",ratail:"\u291a",ratio:"\u2236",rationals:"\u211a",rbarr:"\u290d",rbbrk:"\u2773",rbrke:"\u298c",rbrksld:"\u298e",rbrkslu:"\u2990",rcaron:"\u0159",rcedil:"\u0157",rceil:"\u2309",rcub:"}",rcy:"\u0440",rdca:"\u2937",rdldhar:"\u2969",rdquo:"\u201d",rdquor:"\u201d",rdsh:"\u21b3",real:"\u211c",realine:"\u211b",realpart:"\u211c",reals:"\u211d",rect:"\u25ad",reg:"\xae",rfisht:"\u297d",rfloor:"\u230b",rhard:"\u21c1",rharu:"\u21c0",rharul:"\u296c",rightarrow:"\u2192",rightarrowtail:"\u21a3",rightharpoondown:"\u21c1",rightharpoonup:"\u21c0",rightleftarrows:"\u21c4",rightleftharpoons:"\u21cc",rightsquigarrow:"\u219d",risingdotseq:"\u2253",rlarr:"\u21c4",rlhar:"\u21cc",rlm:"\u200f",rmoustache:"\u23b1",rnmid:"\u2aee",roang:"\u27ed",roarr:"\u21fe",robrk:"\u27e7",ropar:"\u2986",roplus:"\u2a2e",rotimes:"\u2a35",rpar:")",rpargt:"\u2994",rppolint:"\u2a12",rrarr:"\u21c9",rsaquo:"\u203a",rsh:"\u21b1",rsqb:"]",rsquo:"\u2019",rsquor:"\u2019",rthree:"\u22cc",rtrie:"\u22b5",rtrif:"\u25b8",rtriltri:"\u29ce",ruluhar:"\u2968",rx:"\u211e"},"r")},function(t,e,r){"use strict";var n,i,o=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)});Object.defineProperty(e,"__esModule",{value:!0});var a=r(11),s=r(114),c=r(27);r(243);var l,u=(l=c.AbstractTags,o(h,l),h);function h(){return 
null!==l&&l.apply(this,arguments)||this}e.AmsTags=u;e.AmsConfiguration=a.Configuration.create("ams",{handler:{delimiter:["AMSsymbols-delimiter","AMSmath-delimiter"],macro:["AMSsymbols-mathchar0mi","AMSsymbols-mathchar0m0","AMSsymbols-delimiter","AMSsymbols-macros","AMSmath-mathchar0mo","AMSmath-macros","AMSmath-delimiter"],environment:["AMSmath-environment"]},items:(i={},i[s.MultlineItem.prototype.kind]=s.MultlineItem,i),tags:{ams:u},init:function(t){t.append(a.Configuration.extension())}})},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});function n(t){for(var e=[],r=0,n=t.length;r=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var o,a=r(11),s=r(9),T=r(22),w=r(116),b=r(18),c=r(3),l=(o=s.CommandMap,i(u,o),u.prototype.remove=function(t){this.map.delete(t)},u);function u(){return null!==o&&o.apply(this,arguments)||this}function A(t,e,r,n){var i,o,a,s;if(b.Package.packages.has(t.options.require.prefix+r)){var c=t.options.autoload[r],l=C(2===c.length&&Array.isArray(c[0])?c:[c,[]],2),u=l[0],h=l[1];try{for(var f=_(u),p=f.next();!p.done;p=f.next()){var d=p.value;k.remove(d)}}catch(t){i={error:t}}finally{try{p&&!p.done&&(o=f.return)&&o.call(f)}finally{if(i)throw i.error}}try{for(var m=_(h),y=m.next();!y.done;y=m.next()){var v=y.value;I.remove(v)}}catch(t){a={error:t}}finally{try{y&&!y.done&&(s=m.return)&&s.call(m)}finally{if(a)throw a.error}}t.i-=e.length+(n?0:7)}w.RequireLoad(t,r)}var k=new(e.AutoloadCommandMap=l)("autoload-macros",{},{}),I=new l("autoload-environments",{},{});e.AutoloadConfiguration=a.Configuration.create("autoload",{handler:{macro:["autoload-macros"],environment:["autoload-environments"]},options:{autoload:c.expandable({action:["toggle","mathtip","texttip"],amsCd:[[],["CD"]],bbox:["bbox"],boldsymbol:["boldsymbol"],braket:["bra","ket","braket","set","Bra","Ket","Braket","Set","ketbra","Ketbra"],cancel:["cancel","bcancel","xcancel","cancelto"],color:["color","definecolor","textcolor","colorbox","fcolorbox"],enclose:["enclose"],extpfeil:["xtwoheadrightarrow","xtwoheadleftarrow","xmapsto","xlongequal","xtofrom","Newextarrow"],html:["href","class","style","cssId"],mhchem:["ce","pu"],newcommand:["newcommand","renewcommand","newenvironment","renewenvironment","def","let"],unicode:["unicode"],verb:["verb"]})},config:function(t,e){var r,n,i,o,a,s,c=e.parseOptions,l=c.handlers.get("macro"),u=c.handlers.get("environment"),h=c.options.autoload;try{for(var f=_(Object.keys(h)),p=f.next();!p.done;p=f.next()){var d=p.value,m=h[d],y=C(2===m.length&&Array.isArray(m[0])?m:[m,[]],2),v=y[0],b=y[1];try{for(var g=(i=void 0,_(v)),M=g.next();!M.done;M=g.next()){var O=M.value;l.lookup(O)&&"color"!==O||k.add(O,new T.Macro(O,A,[d,!0]))}}catch(t){i={error:t}}finally{try{M&&!M.done&&(o=g.return)&&o.call(g)}finally{if(i)throw i.error}}try{for(var x=(a=void 0,_(b)),S=x.next();!S.done;S=x.next()){var E=S.value;u.lookup(E)||I.add(E,new T.Macro(E,A,[d,!1]))}}catch(t){a={error:t}}finally{try{S&&!S.done&&(s=x.return)&&s.call(x)}finally{if(a)throw a.error}}}}catch(t){r={error:t}}finally{try{p&&!p.done&&(n=f.return)&&n.call(f)}finally{if(r)throw r.error}}c.options.require.jax||w.RequireConfiguration.config(t,e)},configPriority:10,init:function(t){t.options.require||c.defaultOptions(t.options,w.RequireConfiguration.options)},priority:10})},function(t,e,r){"use strict";var u=this&&this.__values||function(t){var e="function"==typeof 
Symbol&&Symbol.iterator,r=e&&t[e],n=0;if(r)return r.call(t);if(t&&"number"==typeof t.length)return{next:function(){return t&&n>=t.length&&(t=void 0),{value:t&&t[n++],done:!t}}};throw new TypeError(e?"Object is not iterable.":"Symbol.iterator is not defined.")};Object.defineProperty(e,"__esModule",{value:!0});var n=r(11),i=r(3),o=r(9),h=r(22),f=r(72);var p=new o.CommandMap("configMacros",{},{});e.ConfigMacrosConfiguration=n.Configuration.create("configMacros",{handler:{macro:["configMacros"]},config:function(t,e){var r,n,i=t.options.macros;try{for(var o=u(Object.keys(i)),a=o.next();!a.done;a=o.next()){var s=a.value,c="string"==typeof i[s]?[i[s]]:i[s],l=Array.isArray(c[2])?new h.Macro(s,f.default.MacroWithTemplate,c.slice(0,2).concat(c[2])):new h.Macro(s,f.default.Macro,c);p.add(s,l)}}catch(t){r={error:t}}finally{try{a&&!a.done&&(n=o.return)&&n.call(o)}finally{if(r)throw r.error}}},options:{macros:i.expandable({})}})},function(t,e,r){"use strict";var n;Object.defineProperty(e,"__esModule",{value:!0});var i=r(11),o=r(118),a=r(8);r(247);e.NewcommandConfiguration=i.Configuration.create("newcommand",{handler:{macro:["Newcommand-macros"]},items:(n={},n[o.BeginEnvItem.prototype.kind]=o.BeginEnvItem,n),options:{maxMacros:1e3},init:function(t){t.handler.macro.indexOf(a.ExtensionMaps.NEW_COMMAND)<0&&t.append(i.Configuration.extension())}})},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(72);new(r(9).CommandMap)("Newcommand-macros",{newcommand:"NewCommand",renewcommand:"NewCommand",newenvironment:"NewEnvironment",renewenvironment:"NewEnvironment",def:"MacroDef",let:"Let"},n.default)},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var n=r(11);e.NoUndefinedConfiguration=n.Configuration.create("noundefined",{fallback:{macro:function(t,e){var r=t.create("text","\\"+e);t.Push(t.create("node","mtext",[],{mathcolor:"red"},r))}}})},function(t,e,r){"use strict";Object.defineProperty(e,"__esModule",{value:!0}),e.registerTeX=function(t){if(MathJax.startup){MathJax.startup.registerConstructor("tex",n.TeX),MathJax.startup.useInput("tex"),MathJax.config.tex||(MathJax.config.tex={});var e=MathJax.config.tex.packages;MathJax.config.tex.packages=t,e&&(0,i.insert)(MathJax.config.tex,{packages:e})}};var n=r(105),i=r(3)},function(t,e,r){"use strict";r(251);var n=r(119);MathJax.startup&&(MathJax.startup.registerConstructor("mml",n.MathML),MathJax.startup.useInput("mml"))},function(t,e,r){"use strict";var n=r(5),i=s(r(119)),o=s(r(120)),a=s(r(121));function s(t){if(t&&t.__esModule)return t;var e={};if(null!=t)for(var r in t)Object.prototype.hasOwnProperty.call(t,r)&&(e[r]=t[r]);return e.default=t,e}(0,n.combineWithMathJax)({_:{input:{mathml_ts:i,mathml:{FindMathML:o,MathMLCompile:a}}}})},function(t,e,r){"use strict";r(253);var n=r(5),i=r(122);MathJax.loader&&(0,n.combineDefaults)(MathJax.config.loader,"output/chtml",{checkReady:function(){return MathJax.loader.load("output/chtml/fonts/tex")}}),MathJax.startup&&(MathJax.startup.registerConstructor("chtml",i.CHTML),MathJax.startup.useOutput("chtml"))},function(t,e,r){"use strict";var 
n=r(5),i=mt(r(122)),o=mt(r(1)),a=mt(r(148)),s=mt(r(2)),c=mt(r(125)),l=mt(r(127)),u=mt(r(172)),h=mt(r(174)),f=mt(r(167)),p=mt(r(130)),d=mt(r(146)),m=mt(r(150)),y=mt(r(152)),v=mt(r(168)),b=mt(r(132)),g=mt(r(160)),M=mt(r(136)),O=mt(r(134)),x=mt(r(144)),S=mt(r(155)),E=mt(r(149)),C=mt(r(138)),_=mt(r(142)),T=mt(r(74)),w=mt(r(37)),A=mt(r(162)),k=mt(r(165)),I=mt(r(140)),L=mt(r(164)),N=mt(r(159)),P=mt(r(157)),B=mt(r(170)),R=mt(r(16)),j=mt(r(124)),H=mt(r(23)),D=mt(r(36)),X=mt(r(123)),F=mt(r(128)),W=mt(r(126)),J=mt(r(173)),q=mt(r(175)),V=mt(r(76)),U=mt(r(131)),z=mt(r(147)),G=mt(r(151)),K=mt(r(153)),Z=mt(r(169)),Y=mt(r(133)),$=mt(r(161)),Q=mt(r(137)),tt=mt(r(135)),et=mt(r(145)),rt=mt(r(156)),nt=mt(r(73)),it=mt(r(139)),ot=mt(r(143)),at=mt(r(154)),st=mt(r(38)),ct=mt(r(163)),lt=mt(r(166)),ut=mt(r(141)),ht=mt(r(75)),ft=mt(r(39)),pt=mt(r(158)),dt=mt(r(171));function mt(t){if(t&&t.__esModule)return t;var e={};if(null!=t)for(var r in t)Object.prototype.hasOwnProperty.call(t,r)&&(e[r]=t[r]);return e.default=t,e}(0,n.combineWithMathJax)({_:{output:{chtml_ts:i,chtml:{FontData:o,Notation:a,Wrapper:s,WrapperFactory:c,Wrappers_ts:l,Wrappers:{TeXAtom:u,TextNode:h,maction:f,math:p,menclose:d,mfenced:m,mfrac:y,mglyph:v,mi:b,mmultiscripts:g,mn:M,mo:O,mpadded:x,mroot:S,mrow:E,ms:C,mspace:_,msqrt:T,msubsup:w,mtable:A,mtd:k,mtext:I,mtr:L,munderover:N,scriptbase:P,semantics:B}},common:{BBox:R,CssStyles:j,FontData:H,Notation:D,OutputJax:X,Wrapper:F,WrapperFactory:W,Wrappers:{TeXAtom:J,TextNode:q,maction:V,math:U,menclose:z,mfenced:G,mfrac:K,mglyph:Z,mi:Y,mmultiscripts:$,mn:Q,mo:tt,mpadded:et,mroot:rt,mrow:nt,ms:it,mspace:ot,msqrt:at,msubsup:st,mtable:ct,mtd:lt,mtext:ut,mtr:ht,munderover:ft,scriptbase:pt,semantics:dt}}}}})},function(t,e,r){"use strict";var n,i=this&&this.__extends||(n=function(t,e){return(n=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,e){t.__proto__=e}||function(t,e){for(var r in e)e.hasOwnProperty(r)&&(t[r]=e[r])})(t,e)},function(t,e){function r(){this.constructor=t}n(t,e),t.prototype=null===e?Object.create(e):(r.prototype=e.prototype,new r)}),o=this&&this.__assign||function(){return(o=Object.assign||function(t){for(var e,r=1,n=arguments.length;rdocument.body.offsetWidth-5&&(n=document.body.offsetWidth-l.offsetWidth-5),this.post(n,i)},kt.prototype.registerWidget=function(t){this.widgets.push(t)},kt.prototype.unregisterWidget=function(t){var e=this.widgets.indexOf(t);-1document.body.offsetWidth-5&&(i=Math.max(5,i-n-e.offsetWidth+6)),I.prototype.post.call(this,i,o)}},It.prototype.display=function(){this.baseMenu.getFrame().appendChild(this.getHtml())},It.prototype.setBaseMenu=function(){for(var t=this;(t=t.anchor.getMenu())instanceof It;);this.baseMenu=t},L=It,k.SubMenu=L,function(t){t.close=function(t){var e=t.getMenu();e instanceof N.SubMenu?e.baseMenu.unpost():e.unpost()},t.getActiveElement=function(t){var e=t.getMenu();return(e instanceof N.SubMenu?e.baseMenu:e).getStore().getActive()},t.error=function(t,e){console.log("ContextMenu Error: "+e)},t.counter=function(){return e++};var e=0}((N=vt=vt||{}).MenuUtil||(N.MenuUtil={})),P=vt=vt||{},B=P.AbstractEntry,bt(Lt,B),Object.defineProperty(Lt.prototype,"content",{get:function(){return this._content},set:function(t){this._content=t,this.generateHtml(),this.getMenu()&&this.getMenu().generateHtml()},enumerable:!0,configurable:!0}),Lt.prototype.getId=function(){return 
this.id},Lt.prototype.press=function(){this.disabled||(this.executeAction(),this.executeCallbacks_())},Lt.prototype.executeAction=function(){},Lt.prototype.registerCallback=function(t){-1===this.callbacks.indexOf(t)&&this.callbacks.push(t)},Lt.prototype.unregisterCallback=function(t){var e=this.callbacks.indexOf(t);-1!==e&&this.callbacks.splice(e,1)},Lt.prototype.mousedown=function(t){this.press(),this.stop(t)},Lt.prototype.mouseover=function(t){this.focus(),this.stop(t)},Lt.prototype.mouseout=function(t){this.deactivate(),this.stop(t)},Lt.prototype.generateHtml=function(){B.prototype.generateHtml.call(this);var t=this.getHtml();t.setAttribute("aria-disabled","false"),t.textContent=this.content},Lt.prototype.activate=function(){this.disabled||this.getHtml().classList.add(P.HtmlClasses.MENUACTIVE)},Lt.prototype.deactivate=function(){this.getHtml().classList.remove(P.HtmlClasses.MENUACTIVE)},Lt.prototype.focus=function(){this.getMenu().setFocused(this),B.prototype.focus.call(this),this.activate()},Lt.prototype.unfocus=function(){this.deactivate(),B.prototype.unfocus.call(this)},Lt.prototype.escape=function(t){P.MenuUtil.close(this)},Lt.prototype.up=function(t){this.getMenu().up(t)},Lt.prototype.down=function(t){this.getMenu().down(t)},Lt.prototype.left=function(t){if(this.getMenu()instanceof P.ContextMenu)this.getMenu().left(t);else{var e=this.getMenu();e.setFocused(null),e.getAnchor().focus()}},Lt.prototype.right=function(t){this.getMenu().right(t)},Lt.prototype.space=function(t){this.press()},Lt.prototype.disable=function(){this.disabled=!0;var t=this.getHtml();t.classList.add(P.HtmlClasses.MENUDISABLED),t.setAttribute("aria-disabled","true")},Lt.prototype.enable=function(){this.disabled=!1;var t=this.getHtml();t.classList.remove(P.HtmlClasses.MENUDISABLED),t.removeAttribute("aria-disabled")},Lt.prototype.executeCallbacks_=function(){P.MenuUtil.getActiveElement(this);for(var t=0,e=this.callbacks;t'+this.title+''),r.write("
"+this.generateContent()+"
"),r.write('
'),r.write(""),r.close()):(r.open(),r.write(""+this.title+''),r.write("
"+this.generateContent()+"
"),r.write(""),r.close(),setTimeout(this.resize.bind(this),50))},Jt.prototype.unpost=function(){this.windowList.forEach(function(t){return t.close()}),this.window=null},Jt.prototype.generateContent=function(){return this.content(this.active)},Jt.prototype.resize=function(){var t=this.window.document.body.firstChild,e=this.window.outerHeight-this.window.innerHeight||30,r=this.window.outerWidth-this.window.innerWidth||30;r=Math.max(140,Math.min(Math.floor(.5*this.window.screen.width),t.offsetWidth+r+25)),e=Math.max(40,Math.min(Math.floor(.5*this.window.screen.height),t.offsetHeight+e+25)),this.window.resizeTo(r,e);var n=this.active.getBoundingClientRect();if(n){var i=Math.max(0,Math.min(n.right-Math.floor(r/2),this.window.screen.width-r-20)),o=Math.max(0,Math.min(n.bottom-Math.floor(e/2),this.window.screen.height-e-20));this.window.moveTo(i,o)}this.active=null},Jt.popupSettings={status:"no",toolbar:"no",locationbar:"no",menubar:"no",directories:"no",personalbar:"no",resizable:"yes",scrollbars:"yes",width:400,height:300},yt=Jt,dt.Popup=yt,(vt=vt||{}).TOUCH={START:"touchstart",MOVE:"touchmove",END:"touchend",CANCEL:"touchcancel"}},function(t,e,r){"use strict";var n=r(5),i=l(r(225)),o=l(r(226)),a=l(r(229)),s=l(r(227)),c=l(r(228));function l(t){if(t&&t.__esModule)return t;var e={};if(null!=t)for(var r in t)Object.prototype.hasOwnProperty.call(t,r)&&(e[r]=t[r]);return e.default=t,e}(0,n.combineWithMathJax)({_:{ui:{menu:{MJContextMenu:i,Menu:o,MenuHandler:a,MmlVisitor:s,SelectableInfo:c}}}})},function(t,e,r){"use strict";r(78);var n=r(17),i=r(5),o=r(262);(0,i.combineDefaults)(MathJax.config.loader,"dependencies",o.dependencies),(0,i.combineDefaults)(MathJax.config.loader,"paths",o.paths),(0,i.combineDefaults)(MathJax.config.loader,"provides",o.provides),n.Loader.preLoad("loader"),n.Loader.load.apply(n.Loader,function(t){if(Array.isArray(t)){for(var e=0,r=Array(t.length);eli { - margin-left: 22px; -} - -ol>li { - margin-left: 27.2px; -} - -li>*:first-child { - margin-top: 0 -} - -/* Text alignements, this should be forbidden. */ - -.left { - text-align: left; -} - -.right { - text-align: right; -} - -.center { - text-align: center; -} - -/* Links and anchors */ - -a { - text-decoration: none; - color: var(--link-color); -} - -a:hover { - box-shadow: 0 1px 0 0 var(--link-color); -} - -/* Linked highlight */ -*:target { - background-color: var(--target-background) !important; - box-shadow: 0 0px 0 1px var(--target-shadow) !important; - border-radius: 1px; -} - -*:hover > a.anchor { - visibility: visible; -} - -a.anchor:before { - content: "#"; -} - -a.anchor:hover { - box-shadow: none; - text-decoration: none; - color: var(--anchor-hover); -} - -a.anchor { - visibility: hidden; - position: absolute; - /* top: 0px; */ - /* margin-left: -3ex; */ - margin-left: -1.3em; - font-weight: normal; - font-style: normal; - padding-right: 0.4em; - padding-left: 0.4em; - /* To remain selectable */ - color: var(--anchor-color); -} - -.spec > a.anchor { - margin-left: -2.3em; - padding-right: 0.9em; -} - -.xref-unresolved { - color: #2C94BD; -} -.xref-unresolved:hover { - box-shadow: 0 1px 0 0 var(--xref-shadow); -} - -/* Section and document divisions. 
- Until at least 4.03 many of the modules of the stdlib start at .h7, - we restart the sequence there like h2 */ - -h1, h2, h3, h4, h5, h6, .h7, .h8, .h9, .h10 { - font-family: "Fira Sans", Helvetica, Arial, sans-serif; - font-weight: 400; - padding-top: 0.1em; - line-height: 1.2; - overflow-wrap: break-word; -} - -h1 { - font-weight: 500; - font-size: 2.441em; -} - -h1 { - font-weight: 500; - font-size: 1.953em; - box-shadow: 0 1px 0 0 var(--header-shadow); -} - -h2 { - font-size: 1.563em; -} - -h3 { - font-size: 1.25em; -} - -small, .font_small { - font-size: 0.8em; -} - -h1 code, h1 tt { - font-size: inherit; - font-weight: inherit; -} - -h2 code, h2 tt { - font-size: inherit; - font-weight: inherit; -} - -h3 code, h3 tt { - font-size: inherit; - font-weight: inherit; -} - -h3 code, h3 tt { - font-size: inherit; - font-weight: inherit; -} - -h4 { - font-size: 1.12em; -} - -/* Comment delimiters, hidden but accessible to screen readers and - selected for copy/pasting */ - -/* Taken from bootstrap */ -/* See also https://stackoverflow.com/a/27769435/4220738 */ -.comment-delim { - position: absolute; - width: 1px; - height: 1px; - padding: 0; - margin: -1px; - overflow: hidden; - clip: rect(0, 0, 0, 0); - white-space: nowrap; - border: 0; -} - -/* Preformatted and code */ - -tt, code, pre { - font-family: "Fira Mono", courier; - font-weight: 400; -} - -pre { - padding: 0.1em; - border: 1px solid var(--pre-border-color); - border-radius: 5px; - overflow-x: auto; -} - -p code, -li code { - background-color: var(--li-code-background); - color: var(--li-code-color); - border-radius: 3px; - padding: 0 0.3ex; -} - -p a > code { - color: var(--link-color); -} - -code { - white-space: pre-wrap; -} - -/* Code blocks (e.g. Examples) */ - -pre code { - font-size: 0.893rem; -} - -/* Code lexemes */ - -.keyword { - font-weight: 500; -} - -.arrow { white-space: nowrap } - -/* Module member specification */ - -.spec { - background-color: var(--spec-summary-background); - border-radius: 3px; - border-left: 4px solid var(--spec-summary-border-color); - border-right: 5px solid transparent; - padding: 0.35em 0.5em; -} - -li:not(:last-child) > .def-doc { - margin-bottom: 15px; -} - -/* Spacing between items */ -div.odoc-spec,.odoc-include { - margin-bottom: 2em; -} - -.spec.type .variant p, .spec.type .record p { - margin: 5px; -} - -.spec.type .variant, .spec.type .record { - margin-left: 2ch; - list-style: none; - display: flex; - flex-wrap: wrap; - row-gap: 4px; -} - -.spec.type .record > code, .spec.type .variant > code { - min-width: 40%; -} - -.spec.type > ol { - margin-top: 0; - margin-bottom: 0; -} - -.spec.type .record > .def-doc, .spec.type .variant > .def-doc { - min-width:50%; - padding: 0.25em 0.5em; - margin-left: 10%; - border-radius: 3px; - flex-grow:1; - background: var(--main-background); - box-shadow: 2px 2px 4px lightgrey; -} - -div.def { - margin-top: 0; - text-indent: -2ex; - padding-left: 2ex; -} - -div.def-doc>*:first-child { - margin-top: 0; -} - -/* Collapsible inlined include and module */ - -.odoc-include details { - position: relative; -} - -.odoc-include.shadowed-include { - display: none; -} - -.odoc-include details:after { - z-index: -100; - display: block; - content: " "; - position: absolute; - border-radius: 0 1ex 1ex 0; - right: -20px; - top: 1px; - bottom: 1px; - width: 15px; - background: var(--spec-details-after-background, rgba(0, 4, 15, 0.05)); - box-shadow: 0 0px 0 1px var(--spec-details-after-shadow, rgba(204, 204, 204, 0.53)); -} - -.odoc-include summary { - 
position: relative; - margin-bottom: 1em; - cursor: pointer; - outline: none; -} - -.odoc-include summary:hover { - background-color: var(--spec-summary-hover-background); -} - -/* FIXME: Does not work in Firefox. */ -.odoc-include summary::-webkit-details-marker { - color: #888; - transform: scaleX(-1); - position: absolute; - top: calc(50% - 5px); - height: 11px; - right: -29px; -} - -/* Records and variants FIXME */ - -div.def table { - text-indent: 0em; - padding: 0; - margin-left: -2ex; -} - -td.def { - padding-left: 2ex; -} - -td.def-doc *:first-child { - margin-top: 0em; -} - -/* Lists of @tags */ - -.at-tags { list-style-type: none; margin-left: -3ex; } -.at-tags li { padding-left: 3ex; text-indent: -3ex; } -.at-tags .at-tag { text-transform: capitalize } - -/* Lists of modules */ - -.modules { list-style-type: none; margin-left: -3ex; } -.modules li { padding-left: 3ex; text-indent: -3ex; margin-top: 5px } -.modules .synopsis { padding-left: 1ch; } - -/* Odig package index */ - -.packages { list-style-type: none; margin-left: -3ex; } -.packages li { padding-left: 3ex; text-indent: -3ex } -.packages li a.anchor { padding-right: 0.5ch; padding-left: 3ch; } -.packages .version { font-size: 10px; color: var(--by-name-version-color); } -.packages .synopsis { padding-left: 1ch } - -.by-name nav a { - text-transform: uppercase; - font-size: 18px; - margin-right: 1ex; - color: var(--by-name-nav-link-color,); - display: inline-block; -} - -.by-tag nav a { - margin-right: 1ex; - color: var(--by-name-nav-link-color); - display: inline-block; -} - -.by-tag ol { list-style-type: none; } -.by-tag ol.tags li { margin-left: 1ch; display: inline-block } -.by-tag td:first-child { text-transform: uppercase; } - -/* Odig package page */ - -.package nav { - display: inline; - font-size: 14px; - font-weight: normal; -} - -.package .version { - font-size: 14px; -} - -.package.info { - margin: 0; -} - -.package.info td:first-child { - font-style: italic; - padding-right: 2ex; -} - -.package.info ul { - list-style-type: none; - display: inline; - margin: 0; -} - -.package.info li { - display: inline-block; - margin: 0; - margin-right: 1ex; -} - -#info-authors li, #info-maintainers li { - display: block; -} - -/* Sidebar and TOC */ - -.odoc-toc:before { - display: block; - content: "Contents"; - text-transform: uppercase; - font-size: 1em; - margin: 1.414em 0 0.5em; - font-weight: 500; - color: var(--toc-before-color); - line-height: 1.2; -} - -.odoc-toc { - position: fixed; - top: 0px; - bottom: 0px; - left: 0px; - max-width: 30ex; - min-width: 26ex; - width: 20%; - background: var(--toc-background); - overflow: auto; - color: var(--toc-color); - padding-left: 2ex; - padding-right: 2ex; -} - -.odoc-toc ul li a { - font-family: "Fira Sans", sans-serif; - font-size: 0.95em; - color: var(--color); - font-weight: 400; - line-height: 1.6em; - display: block; -} - -.odoc-toc ul li a:hover { - box-shadow: none; - text-decoration: underline; -} - -/* First level titles */ - -.odoc-toc>ul>li>a { - font-weight: 500; -} - -.odoc-toc li ul { - margin: 0px; -} - -.odoc-toc ul { - list-style-type: none; -} - -.odoc-toc ul li { - margin: 0; -} -.odoc-toc>ul>li { - margin-bottom: 0.3em; -} - -.odoc-toc ul li li { - border-left: 1px solid var(--toc-list-border); - margin-left: 5px; - padding-left: 12px; -} - -/* Mobile adjustements. 
*/ - -@media only screen and (max-width: 95ex) { - body.odoc { - margin: 2em; - } - .odoc-toc { - position: static; - width: auto; - min-width: unset; - max-width: unset; - border: none; - padding: 0.2em 1em; - border-radius: 5px; - margin-bottom: 2em; - } -} - -/* Print adjustements. */ - -@media print { - body { - color: black; - background: white; - } - body nav:first-child { - visibility: hidden; - } -} - -/* Syntax highlighting (based on github-gist) */ - -.hljs { - display: block; - background: var(--code-background); - padding: 0.5em; - color: var(--color); - overflow-x: auto; -} - -.hljs-comment, -.hljs-meta { - color: #969896; -} - -.hljs-string, -.hljs-variable, -.hljs-template-variable, -.hljs-strong, -.hljs-emphasis, -.hljs-quote { - color: #df5000; -} - -.hljs-keyword, -.hljs-selector-tag { - color: #a71d5d; -} - -.hljs-type, -.hljs-class .hljs-title { - color: #458; - font-weight: 500; -} - -.hljs-literal, -.hljs-symbol, -.hljs-bullet, -.hljs-attribute { - color: #0086b3; -} - -.hljs-section, -.hljs-name { - color: #63a35c; -} - -.hljs-tag { - color: #333333; -} - -.hljs-attr, -.hljs-selector-id, -.hljs-selector-class, -.hljs-selector-attr, -.hljs-selector-pseudo { - color: #795da3; -} - -.hljs-addition { - color: #55a532; - background-color: #eaffea; -} - -.hljs-deletion { - color: #bd2c00; - background-color: #ffecec; -} - -.hljs-link { - text-decoration: underline; -} - -/*--------------------------------------------------------------------------- - Copyright (c) 2016 The odoc contributors - - Permission to use, copy, modify, and/or distribute this software for any - purpose with or without fee is hereby granted, provided that the above - copyright notice and this permission notice appear in all copies. - - THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES - WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF - MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR - ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES - WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN - ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF - OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - ---------------------------------------------------------------------------*/ diff --git a/owl-base/Owl_algodiff_check/.dummy b/owl-base/Owl_algodiff_check/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_algodiff_check/Make/Forward/index.html b/owl-base/Owl_algodiff_check/Make/Forward/index.html deleted file mode 100644 index 4a3ffd1c2..000000000 --- a/owl-base/Owl_algodiff_check/Make/Forward/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Forward (owl-base.Owl_algodiff_check.Make.Forward)

Module Make.Forward

val check : - threshold:float -> - f:(AD.t -> AD.t) -> - directions:AD.t array -> - AD.t array -> - bool * int
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/Reverse/index.html b/owl-base/Owl_algodiff_check/Make/Reverse/index.html deleted file mode 100644 index a18757390..000000000 --- a/owl-base/Owl_algodiff_check/Make/Reverse/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -Reverse (owl-base.Owl_algodiff_check.Make.Reverse)

Module Make.Reverse

val check : - threshold:float -> - order:[ `second | `fourth | `eighth ] -> - ?verbose:bool -> - ?eps:float -> - f:(AD.t -> AD.t) -> - directions:AD.t array -> - AD.t array -> - bool * int
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/argument-1-AD/A/Linalg/index.html b/owl-base/Owl_algodiff_check/Make/argument-1-AD/A/Linalg/index.html deleted file mode 100644 index c2428b0f9..000000000 --- a/owl-base/Owl_algodiff_check/Make/argument-1-AD/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_algodiff_check.Make.AD.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : - ?solver:[ `default | `bilinear | `direct ] -> - arr -> - arr -> - arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/argument-1-AD/A/Mat/index.html b/owl-base/Owl_algodiff_check/Make/argument-1-AD/A/Mat/index.html deleted file mode 100644 index 508343f61..000000000 --- a/owl-base/Owl_algodiff_check/Make/argument-1-AD/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_algodiff_check.Make.AD.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/argument-1-AD/A/Scalar/index.html b/owl-base/Owl_algodiff_check/Make/argument-1-AD/A/Scalar/index.html deleted file mode 100644 index eedbddc1b..000000000 --- a/owl-base/Owl_algodiff_check/Make/argument-1-AD/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_algodiff_check.Make.AD.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/argument-1-AD/A/index.html b/owl-base/Owl_algodiff_check/Make/argument-1-AD/A/index.html deleted file mode 100644 index 2155681f4..000000000 --- a/owl-base/Owl_algodiff_check/Make/argument-1-AD/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_algodiff_check.Make.AD.A)

Module AD.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(elt -> string) -> - arr -> - unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val dilated_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val transpose_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val max_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv1d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Arr/index.html b/owl-base/Owl_algodiff_check/Make/argument-1-AD/Arr/index.html deleted file mode 100644 index 9401b46bf..000000000 --- a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl-base.Owl_algodiff_check.Make.AD.Arr)

Module AD.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/index.html b/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/index.html deleted file mode 100644 index 10a588b48..000000000 --- a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl-base.Owl_algodiff_check.Make.AD.Builder)

Module AD.Builder

Ops Builder
module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t

build single input single output operations

module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t

build single input pair outputs operations

module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t

build single input triple outputs operations

module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array

build single input array output operations

module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t

build pair inputs single output operations

module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t

build array input single output operations

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Aiso/index.html b/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Aiso/index.html deleted file mode 100644 index be76e3c7a..000000000 --- a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl-base.Owl_algodiff_check.Make.AD.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Piso/index.html b/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Piso/index.html deleted file mode 100644 index e41963cd5..000000000 --- a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl-base.Owl_algodiff_check.Make.AD.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Siao/index.html b/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Siao/index.html deleted file mode 100644 index 153a448d4..000000000 --- a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl-base.Owl_algodiff_check.Make.AD.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Sipo/index.html b/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Sipo/index.html deleted file mode 100644 index b495d93a1..000000000 --- a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl-base.Owl_algodiff_check.Make.AD.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : - t -> - t -> - (t Stdlib.ref * t Stdlib.ref) -> - (t Stdlib.ref * t Stdlib.ref) -> - t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Siso/index.html b/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Siso/index.html deleted file mode 100644 index 5ac19edf7..000000000 --- a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl-base.Owl_algodiff_check.Make.AD.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Sito/index.html b/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Sito/index.html deleted file mode 100644 index bed87a578..000000000 --- a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl-base.Owl_algodiff_check.Make.AD.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : - t -> - t -> - (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> - (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> - t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Linalg/index.html b/owl-base/Owl_algodiff_check/Make/argument-1-AD/Linalg/index.html deleted file mode 100644 index 613ca524c..000000000 --- a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_algodiff_check.Make.AD.Linalg)

Module AD.Linalg

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val logdet : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val chol : ?upper:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val qr : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val lq : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val svd : ?thin:bool -> t -> t * t * t

Refer to :doc:`owl_dense_ndarray_generic`

val sylvester : t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val lyapunov : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val discrete_lyapunov : - ?solver:[ `default | `bilinear | `direct ] -> - t -> - t -> - t

Refer to :doc:`owl_dense_ndarray_generic`

val (/@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val care : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dare : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Mat/index.html b/owl-base/Owl_algodiff_check/Make/argument-1-AD/Mat/index.html deleted file mode 100644 index c105ea5fb..000000000 --- a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_algodiff_check.Make.AD.Mat)

Module AD.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Maths/index.html b/owl-base/Owl_algodiff_check/Make/argument-1-AD/Maths/index.html deleted file mode 100644 index e5a89392e..000000000 --- a/owl-base/Owl_algodiff_check/Make/argument-1-AD/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl-base.Owl_algodiff_check.Make.AD.Maths)

Module AD.Maths

val (+) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (-) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (**) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val add : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sub : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mul : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val div : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val kron : t -> t -> t

Refer to :doc:`owl_dense_matrix_generic`

val dot : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pow : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val min2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cross_entropy : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val neg : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val abs : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val signum : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val floor : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val ceil : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val round : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqr : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqrt : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log2 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log10 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val exp : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum_reduce : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mean : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val swap : int -> int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l1norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm_sqr' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sigmoid : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val relu : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dawsn : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softplus : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softsign : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softmax : ?axis:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val reshape : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val flatten : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_item : t -> int -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_row : t -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val concat : axis:int -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val split : axis:int -> int array -> t -> t array

Refer to :doc:`owl_dense_ndarray_generic`

val of_arrays : t array array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val to_arrays : t -> t array array

Refer to :doc:`owl_dense_ndarray_generic`

val concatenate : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val stack : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_slice : int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_slice : int list list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_fancy : Owl_types.index list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_fancy : Owl_types.index list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diag : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diagm : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val trace : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val triu : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tril : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/argument-1-AD/NN/index.html b/owl-base/Owl_algodiff_check/Make/argument-1-AD/NN/index.html deleted file mode 100644 index 75ebdf794..000000000 --- a/owl-base/Owl_algodiff_check/Make/argument-1-AD/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl-base.Owl_algodiff_check.Make.AD.NN)

Module AD.NN

val dropout : ?rate:float -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv1d : - ?padding:Owl_types.padding -> - t -> - t -> - int array -> - int array -> - t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv2d : - ?padding:Owl_types.padding -> - t -> - t -> - int array -> - int array -> - t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv3d : - ?padding:Owl_types.padding -> - t -> - t -> - int array -> - int array -> - t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val upsampling2d : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pad : ?v:A.elt -> int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/argument-1-AD/index.html b/owl-base/Owl_algodiff_check/Make/argument-1-AD/index.html deleted file mode 100644 index f7adbfccd..000000000 --- a/owl-base/Owl_algodiff_check/Make/argument-1-AD/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -AD (owl-base.Owl_algodiff_check.Make.AD)

Parameter Make.AD

include Owl_algodiff_core_sig.Sig
Type definition
include Owl_algodiff_types_sig.Sig with type elt := A.elt and type arr := A.arr
type t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
Core functions
val tag : unit -> int

TODO

val primal : t -> t

TODO

val primal' : t -> t

TODO

val zero : t -> t

TODO

val reset_zero : t -> t

TODO

val tangent : t -> t

TODO

val adjref : t -> t Stdlib.ref

TODO

val adjval : t -> t

TODO

val shape : t -> int array

TODO

val is_float : t -> bool

TODO

val is_arr : t -> bool

TODO

val row_num : t -> int

number of rows

val col_num : t -> int

number of columns

val numel : t -> int

number of elements

val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t

other functions, without tracking gradient

val clip_by_l2norm : A.elt -> t -> t

other functions, without tracking gradient

val copy_primal' : t -> t

TODO

val tile : t -> int array -> t

TODO

val repeat : t -> int array -> t

TODO

val pack_elt : A.elt -> t

convert from elt type to t type.

val unpack_elt : t -> A.elt

convert from t type to elt type.

val pack_flt : float -> t

convert from float type to t type.

val _f : float -> t

A shortcut function for F A.(float_to_elt x).

val unpack_flt : t -> float

convert from t type to float type.

val pack_arr : A.arr -> t

convert from arr type to t type.

val unpack_arr : t -> A.arr

convert from t type to arr type.
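For illustration, here is a minimal OCaml sketch of these packing helpers, assuming this interface is instantiated as Owl's stock double-precision module Owl.Algodiff.D (the module path is an assumption; the function names come from the signatures above).

module AD = Owl.Algodiff.D   (* assumed instantiation of this interface *)

let s = AD.pack_flt 2.5                          (* float -> t *)
let y = AD.unpack_flt AD.Maths.(sqr s)           (* t -> float, here 6.25 *)
let a = AD.pack_arr (Owl.Arr.ones [| 3; 3 |])    (* plain ndarray -> t *)
let b = AD.unpack_arr AD.Maths.(a + a)           (* t -> plain ndarray, all elements 2. *)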

val deep_info : t -> string

TODO

val type_info : t -> string

TODO

val error_binop : string -> t -> t -> 'a

TODO

val error_uniop : string -> t -> 'a

TODO

val make_forward : t -> t -> int -> t

TODO

val make_reverse : t -> int -> t

TODO

val reverse_prop : t -> t -> unit

TODO

val diff : (t -> t) -> t -> t

diff f x returns the exact derivative of a function f : scalar -> scalar at point x. Simply calling diff f returns its derivative function g of the same type, i.e. g : scalar -> scalar.

Calling this function repeatedly gives you higher-order derivatives of f, i.e. f |> diff |> diff |> diff |> ...
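To make the two paragraphs above concrete, a minimal sketch under the assumption that this interface is available as Owl.Algodiff.D (the module path is an assumption; f below is made up).

module AD = Owl.Algodiff.D

let f x = AD.Maths.(sin x * sqr x)    (* scalar -> scalar *)
let f' = AD.diff f                    (* first derivative, same type as f *)
let f'' = AD.diff (AD.diff f)         (* second derivative by composing diff *)

let () =
  let x = AD.pack_flt 1.5 in
  Printf.printf "f'(1.5) = %g  f''(1.5) = %g\n"
    (AD.unpack_flt (f' x))
    (AD.unpack_flt (f'' x))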

val diff' : (t -> t) -> t -> t * t

similar to diff, but return (f x, diff f x).

val grad : (t -> t) -> t -> t

gradient of f : (vector -> scalar) at x, reverse ad.

val grad' : (t -> t) -> t -> t * t

similar to grad, but return (f x, grad f x).
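A minimal sketch of grad and grad' on a vector-to-scalar function, under the same Owl.Algodiff.D assumption as above; g is made up.

module AD = Owl.Algodiff.D

let g x = AD.Maths.(l2norm_sqr' x + sum' (sin x))   (* row vector -> scalar *)

let () =
  let x = AD.Mat.uniform 1 3 in
  let y, gx = AD.grad' g x in     (* value and gradient from one reverse pass *)
  Printf.printf "g(x) = %g\n" (AD.unpack_flt y);
  AD.Mat.print gx                 (* the gradient has the same 1x3 shape as x *)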

val jacobian : (t -> t) -> t -> t

jacobian of f : (vector -> vector) at x; both x and the output y = f x are row vectors.

val jacobian' : (t -> t) -> t -> t * t

similar to jacobian, but returns (f x, jacobian f x).
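A minimal sketch of jacobian on a vector -> vector map, assuming Owl.Algodiff.D; the input is a row vector.

module AD = Owl.Algodiff.D

(* f(x) = x * x element-wise, so the Jacobian at x is diag (2 x) *)
let f x = AD.Maths.(x * x)

let () =
  let x = AD.Arr.uniform [| 1; 3 |] in
  let j = AD.jacobian f x in              (* a 3 x 3 matrix *)
  Owl.Arr.print (AD.unpack_arr j)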

val jacobianv : (t -> t) -> t -> t -> t

jacobian vector product of f : (vector -> vector) at x along v, forward ad. Namely, it calculates (jacobian f x) v.

val jacobianv' : (t -> t) -> t -> t -> t * t

similar to jacobianv, but returns (f x, jacobianv f x v).

val jacobianTv : (t -> t) -> t -> t -> t

transposed jacobian vector product of f : (vector -> vector) at x along v, backward ad. Namely, it calculates (transpose (jacobian f x)) v.

val jacobianTv' : (t -> t) -> t -> t -> t * t

similar to jacobianTv, but returns (f x, jacobianTv f x v).

val hessian : (t -> t) -> t -> t

hessian of f : (vector -> scalar) at x.

val hessian' : (t -> t) -> t -> t * t

similar to hessian, but returns (f x, hessian f x).
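A minimal sketch of hessian, assuming Owl.Algodiff.D; for g(x) = sum_i x_i^2 the Hessian is 2 I.

module AD = Owl.Algodiff.D

let g x = AD.Maths.(sum' (x * x))

let () =
  let x = AD.Arr.uniform [| 1; 4 |] in
  let h = AD.hessian g x in               (* a 4 x 4 matrix, here 2 I *)
  Owl.Arr.print (AD.unpack_arr h)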

val hessianv : (t -> t) -> t -> t -> t

hessian vector product of f : (vector -> scalar) at x along v. Namely, it calculates (hessian f x) v.

val hessianv' : (t -> t) -> t -> t -> t * t

similar to hessianv, but returns (f x, hessianv f x v).

val laplacian : (t -> t) -> t -> t

laplacian of f : (vector -> scalar) at x.

val laplacian' : (t -> t) -> t -> t * t

similar to laplacian, but returns (f x, laplacian f x).

val gradhessian : (t -> t) -> t -> t * t

returns (grad f x, hessian f x), where f : (vector -> scalar).

val gradhessian' : (t -> t) -> t -> t * t * t

returns (f x, grad f x, hessian f x).

val gradhessianv : (t -> t) -> t -> t -> t * t

returns (grad f x v, hessian f x v).

val gradhessianv' : (t -> t) -> t -> t -> t * t * t

returns (f x, grad f x v, hessian f x v).

include Owl_algodiff_ops_sig.Sig - with type t := t - and type elt := A.elt - and type arr := A.arr - and type op := op
module Builder : - Owl_algodiff_ops_builder_sig.Sig - with type t := t - and type elt := A.elt - and type arr := A.arr - and type op := op
Supported Maths functions
module Maths : sig ... end
Supported Linalg functions
module Linalg : sig ... end
Supported Neural Network functions
module NN : sig ... end
Supported Mat functions
module Mat : sig ... end
Supported Arr functions
module Arr : sig ... end
Helper functions
include Owl_algodiff_graph_convert_sig.Sig with type t := t
val to_trace : t list -> string

to_trace [t0; t1; ...] outputs the trace of the computation graph to the terminal in a human-readable format.

val to_dot : t list -> string

to_dot [t0; t1; ...] outputs the trace of the computation graph in dot file format, which you can feed to other tools such as Graphviz for further visualisation.
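A minimal sketch of exporting a computation graph with to_dot, assuming Owl.Algodiff.D; the file name graph.dot is an arbitrary choice.

module AD = Owl.Algodiff.D

let () =
  let f x = AD.Maths.(sin x * x) in
  (* lift x into reverse mode so that the computation graph is recorded *)
  let x = AD.make_reverse (AD._f 1.5) (AD.tag ()) in
  let y = f x in
  let oc = open_out "graph.dot" in
  output_string oc (AD.to_dot [ y ]);
  close_out oc
  (* render with, e.g.: dot -Tpdf graph.dot -o graph.pdf *)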

val pp_num : Stdlib.Format.formatter -> t -> unit

pp_num t pretty prints the abstract number used in Algodiff.

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_check/Make/index.html b/owl-base/Owl_algodiff_check/Make/index.html deleted file mode 100644 index b1163cfd1..000000000 --- a/owl-base/Owl_algodiff_check/Make/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Make (owl-base.Owl_algodiff_check.Make)

Module Owl_algodiff_check.Make

Parameters

Signature

val generate_test_samples : (int * int) -> int -> AD.t array * AD.t array
module Reverse : sig ... end
module Forward : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_core/.dummy b/owl-base/Owl_algodiff_core/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_algodiff_core/Make/A/Linalg/index.html b/owl-base/Owl_algodiff_core/Make/A/Linalg/index.html deleted file mode 100644 index 10704de9e..000000000 --- a/owl-base/Owl_algodiff_core/Make/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_algodiff_core.Make.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : - ?solver:[ `default | `bilinear | `direct ] -> - arr -> - arr -> - arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_core/Make/A/Mat/index.html b/owl-base/Owl_algodiff_core/Make/A/Mat/index.html deleted file mode 100644 index 2e8eed362..000000000 --- a/owl-base/Owl_algodiff_core/Make/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_algodiff_core.Make.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_core/Make/A/Scalar/index.html b/owl-base/Owl_algodiff_core/Make/A/Scalar/index.html deleted file mode 100644 index 0d728dc9b..000000000 --- a/owl-base/Owl_algodiff_core/Make/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_algodiff_core.Make.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_core/Make/A/index.html b/owl-base/Owl_algodiff_core/Make/A/index.html deleted file mode 100644 index 1cbd93c25..000000000 --- a/owl-base/Owl_algodiff_core/Make/A/index.html +++ /dev/null @@ -1,160 +0,0 @@ - -A (owl-base.Owl_algodiff_core.Make.A)

Module Make.A

include Owl_types_ndarray_eltcmp.Sig - with type arr = A.arr - with type elt = A.elt
include Owl_types_ndarray_basic.Sig with type arr = A.arr with type elt = A.elt
type arr = A.arr
type elt = A.elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(elt -> string) -> - arr -> - unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val dilated_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val transpose_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val max_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv1d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_core/Make/argument-1-A/Linalg/index.html b/owl-base/Owl_algodiff_core/Make/argument-1-A/Linalg/index.html deleted file mode 100644 index 10704de9e..000000000 --- a/owl-base/Owl_algodiff_core/Make/argument-1-A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_algodiff_core.Make.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : - ?solver:[ `default | `bilinear | `direct ] -> - arr -> - arr -> - arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_core/Make/argument-1-A/Mat/index.html b/owl-base/Owl_algodiff_core/Make/argument-1-A/Mat/index.html deleted file mode 100644 index 2e8eed362..000000000 --- a/owl-base/Owl_algodiff_core/Make/argument-1-A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_algodiff_core.Make.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_core/Make/argument-1-A/Scalar/index.html b/owl-base/Owl_algodiff_core/Make/argument-1-A/Scalar/index.html deleted file mode 100644 index 0d728dc9b..000000000 --- a/owl-base/Owl_algodiff_core/Make/argument-1-A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_algodiff_core.Make.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_core/Make/argument-1-A/index.html b/owl-base/Owl_algodiff_core/Make/argument-1-A/index.html deleted file mode 100644 index 7829e15fa..000000000 --- a/owl-base/Owl_algodiff_core/Make/argument-1-A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_algodiff_core.Make.A)

Parameter Make.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(elt -> string) -> - arr -> - unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val dilated_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val transpose_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val max_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv1d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_core/Make/index.html b/owl-base/Owl_algodiff_core/Make/index.html deleted file mode 100644 index 37085ca09..000000000 --- a/owl-base/Owl_algodiff_core/Make/index.html +++ /dev/null @@ -1,3 +0,0 @@ - -Make (owl-base.Owl_algodiff_core.Make)

Module Owl_algodiff_core.Make

Parameters

Signature

module A : - Owl_types_ndarray_algodiff.Sig with type arr = A.arr with type elt = A.elt
Type definition
include Owl_algodiff_types_sig.Sig with type elt := A.elt and type arr := A.arr
type t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
Core functions
val tag : unit -> int

TODO

val primal : t -> t

TODO

val primal' : t -> t

TODO

val zero : t -> t

TODO

val reset_zero : t -> t

TODO

val tangent : t -> t

TODO

val adjref : t -> t Stdlib.ref

TODO

val adjval : t -> t

TODO

val shape : t -> int array

TODO

val is_float : t -> bool

TODO

val is_arr : t -> bool

TODO

val row_num : t -> int

number of rows

val col_num : t -> int

number of columns

val numel : t -> int

number of elements

val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t

other functions, without tracking gradient

val clip_by_l2norm : A.elt -> t -> t

other functions, without tracking gradient

val copy_primal' : t -> t

TODO

val tile : t -> int array -> t

TODO

val repeat : t -> int array -> t

TODO

val pack_elt : A.elt -> t

convert from elt type to t type.

val unpack_elt : t -> A.elt

convert from t type to elt type.

val pack_flt : float -> t

convert from float type to t type.

val _f : float -> t

A shortcut function for F A.(float_to_elt x).

val unpack_flt : t -> float

convert from t type to float type.

val pack_arr : A.arr -> t

convert from arr type to t type.

val unpack_arr : t -> A.arr

convert from t type to arr type.

val deep_info : t -> string

TODO

val type_info : t -> string

TODO

val error_binop : string -> t -> t -> 'a

TODO

val error_uniop : string -> t -> 'a

TODO

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_core_sig/.dummy b/owl-base/Owl_algodiff_core_sig/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_algodiff_core_sig/module-type-Sig/A/Linalg/index.html b/owl-base/Owl_algodiff_core_sig/module-type-Sig/A/Linalg/index.html deleted file mode 100644 index 8d9fe56f4..000000000 --- a/owl-base/Owl_algodiff_core_sig/module-type-Sig/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_algodiff_core_sig.Sig.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : - ?solver:[ `default | `bilinear | `direct ] -> - arr -> - arr -> - arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_core_sig/module-type-Sig/A/Mat/index.html b/owl-base/Owl_algodiff_core_sig/module-type-Sig/A/Mat/index.html deleted file mode 100644 index 1631b7e32..000000000 --- a/owl-base/Owl_algodiff_core_sig/module-type-Sig/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_algodiff_core_sig.Sig.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_core_sig/module-type-Sig/A/Scalar/index.html b/owl-base/Owl_algodiff_core_sig/module-type-Sig/A/Scalar/index.html deleted file mode 100644 index c8488197b..000000000 --- a/owl-base/Owl_algodiff_core_sig/module-type-Sig/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_algodiff_core_sig.Sig.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_core_sig/module-type-Sig/A/index.html b/owl-base/Owl_algodiff_core_sig/module-type-Sig/A/index.html deleted file mode 100644 index b3ff90f1f..000000000 --- a/owl-base/Owl_algodiff_core_sig/module-type-Sig/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_algodiff_core_sig.Sig.A)

Module Sig.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(elt -> string) -> - arr -> - unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val dilated_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val transpose_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val max_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv1d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_core_sig/module-type-Sig/index.html b/owl-base/Owl_algodiff_core_sig/module-type-Sig/index.html deleted file mode 100644 index 2c1bdd676..000000000 --- a/owl-base/Owl_algodiff_core_sig/module-type-Sig/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Sig (owl-base.Owl_algodiff_core_sig.Sig)

Module type Owl_algodiff_core_sig.Sig

Type definition
include Owl_algodiff_types_sig.Sig with type elt := A.elt and type arr := A.arr
type t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
Core functions
val tag : unit -> int

TODO

val primal : t -> t

TODO

val primal' : t -> t

TODO

val zero : t -> t

TODO

val reset_zero : t -> t

TODO

val tangent : t -> t

TODO

val adjref : t -> t Stdlib.ref

TODO

val adjval : t -> t

TODO

val shape : t -> int array

TODO

val is_float : t -> bool

TODO

val is_arr : t -> bool

TODO

val row_num : t -> int

number of rows

val col_num : t -> int

number of columns

val numel : t -> int

number of elements

val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t

other functions, without tracking gradient

val clip_by_l2norm : A.elt -> t -> t

other functions, without tracking gradient

val copy_primal' : t -> t

TODO

val tile : t -> int array -> t

TODO

val repeat : t -> int array -> t

TODO

val pack_elt : A.elt -> t

convert from elt type to t type.

val unpack_elt : t -> A.elt

convert from t type to elt type.

val pack_flt : float -> t

convert from float type to t type.

val _f : float -> t

A shortcut function for F A.(float_to_elt x).

val unpack_flt : t -> float

convert from t type to float type.

val pack_arr : A.arr -> t

convert from arr type to t type.

val unpack_arr : t -> A.arr

convert from t type to arr type.

val deep_info : t -> string

TODO

val type_info : t -> string

TODO

val error_binop : string -> t -> t -> 'a

TODO

val error_uniop : string -> t -> 'a

TODO

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/.dummy b/owl-base/Owl_algodiff_generic/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_algodiff_generic/Make/A/Linalg/index.html b/owl-base/Owl_algodiff_generic/Make/A/Linalg/index.html deleted file mode 100644 index 8fae43576..000000000 --- a/owl-base/Owl_algodiff_generic/Make/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_algodiff_generic.Make.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : - ?solver:[ `default | `bilinear | `direct ] -> - arr -> - arr -> - arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/A/Mat/index.html b/owl-base/Owl_algodiff_generic/Make/A/Mat/index.html deleted file mode 100644 index d8c2c71b5..000000000 --- a/owl-base/Owl_algodiff_generic/Make/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_algodiff_generic.Make.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/A/Scalar/index.html b/owl-base/Owl_algodiff_generic/Make/A/Scalar/index.html deleted file mode 100644 index a5560ac64..000000000 --- a/owl-base/Owl_algodiff_generic/Make/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_algodiff_generic.Make.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/A/index.html b/owl-base/Owl_algodiff_generic/Make/A/index.html deleted file mode 100644 index 976c47b88..000000000 --- a/owl-base/Owl_algodiff_generic/Make/A/index.html +++ /dev/null @@ -1,160 +0,0 @@ - -A (owl-base.Owl_algodiff_generic.Make.A)

Module Make.A

include Owl_types_ndarray_eltcmp.Sig - with type arr = A.arr - with type elt = A.elt
include Owl_types_ndarray_basic.Sig with type arr = A.arr with type elt = A.elt
type arr = A.arr
type elt = A.elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(elt -> string) -> - arr -> - unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val dilated_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val transpose_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val max_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv1d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/Arr/index.html b/owl-base/Owl_algodiff_generic/Make/Arr/index.html deleted file mode 100644 index e87e36a63..000000000 --- a/owl-base/Owl_algodiff_generic/Make/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl-base.Owl_algodiff_generic.Make.Arr)

Module Make.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/Builder/index.html b/owl-base/Owl_algodiff_generic/Make/Builder/index.html deleted file mode 100644 index 652c037ef..000000000 --- a/owl-base/Owl_algodiff_generic/Make/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl-base.Owl_algodiff_generic.Make.Builder)

Module Make.Builder

Ops Builder
module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t

build single input single output operations
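A hedged sketch of defining a custom operator with build_siso, assuming the Owl.Algodiff.D instantiation. The module below follows the Siso signature listed further down (label, ff_f, ff_arr, df, dr); the argument order assumed for df (output primal, input primal, input tangent) and for dr (input, output primal, output adjoint) is an interpretation rather than something stated on this page, and my_sin is a made-up name.

module AD = Owl.Algodiff.D
open AD

(* re-implement sin as a user-defined single-input single-output operator *)
let my_sin =
  Builder.build_siso
    (module struct
      let label = "my_sin"
      let ff_f a = F A.Scalar.(sin a)                   (* forward pass, scalar input *)
      let ff_arr a = Arr A.(sin a)                      (* forward pass, ndarray input *)
      let df _cp ap at = Maths.(at * cos ap)            (* forward-mode (tangent) rule *)
      let dr a _cp ca = Maths.(!ca * cos (primal a))    (* reverse-mode (adjoint) rule *)
    end : Builder.Siso)

If these assumptions hold, my_sin behaves like the built-in Maths.sin and participates in both forward and reverse differentiation, e.g. diff my_sin (_f 1.0).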

module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t

build single input pair outputs operations

module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t

build single input triple outputs operations

module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array

build single input array output operations

module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t

build pair inputs single output operations

module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t

build array input single output operations

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/Builder/module-type-Aiso/index.html b/owl-base/Owl_algodiff_generic/Make/Builder/module-type-Aiso/index.html deleted file mode 100644 index 8675a03ab..000000000 --- a/owl-base/Owl_algodiff_generic/Make/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl-base.Owl_algodiff_generic.Make.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/Builder/module-type-Piso/index.html b/owl-base/Owl_algodiff_generic/Make/Builder/module-type-Piso/index.html deleted file mode 100644 index 76b671e0c..000000000 --- a/owl-base/Owl_algodiff_generic/Make/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl-base.Owl_algodiff_generic.Make.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/Builder/module-type-Siao/index.html b/owl-base/Owl_algodiff_generic/Make/Builder/module-type-Siao/index.html deleted file mode 100644 index 5813e0f09..000000000 --- a/owl-base/Owl_algodiff_generic/Make/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl-base.Owl_algodiff_generic.Make.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/Builder/module-type-Sipo/index.html b/owl-base/Owl_algodiff_generic/Make/Builder/module-type-Sipo/index.html deleted file mode 100644 index db2e52c3c..000000000 --- a/owl-base/Owl_algodiff_generic/Make/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl-base.Owl_algodiff_generic.Make.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/Builder/module-type-Siso/index.html b/owl-base/Owl_algodiff_generic/Make/Builder/module-type-Siso/index.html deleted file mode 100644 index d9383c7f3..000000000 --- a/owl-base/Owl_algodiff_generic/Make/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl-base.Owl_algodiff_generic.Make.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/Builder/module-type-Sito/index.html b/owl-base/Owl_algodiff_generic/Make/Builder/module-type-Sito/index.html deleted file mode 100644 index 7473f46e0..000000000 --- a/owl-base/Owl_algodiff_generic/Make/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl-base.Owl_algodiff_generic.Make.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/Linalg/index.html b/owl-base/Owl_algodiff_generic/Make/Linalg/index.html deleted file mode 100644 index e54f9105e..000000000 --- a/owl-base/Owl_algodiff_generic/Make/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_algodiff_generic.Make.Linalg)

Module Make.Linalg

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val logdet : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val chol : ?upper:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val qr : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val lq : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val svd : ?thin:bool -> t -> t * t * t

Refer to :doc:`owl_dense_ndarray_generic`

val sylvester : t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val lyapunov : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val care : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dare : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/Mat/index.html b/owl-base/Owl_algodiff_generic/Make/Mat/index.html deleted file mode 100644 index 6977c26d8..000000000 --- a/owl-base/Owl_algodiff_generic/Make/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_algodiff_generic.Make.Mat)

Module Make.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/Maths/index.html b/owl-base/Owl_algodiff_generic/Make/Maths/index.html deleted file mode 100644 index 901f53374..000000000 --- a/owl-base/Owl_algodiff_generic/Make/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl-base.Owl_algodiff_generic.Make.Maths)

Module Make.Maths

val (+) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (-) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (**) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val add : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sub : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mul : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val div : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val kron : t -> t -> t

Refer to :doc:`owl_dense_matrix_generic`

val dot : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pow : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val min2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cross_entropy : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val neg : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val abs : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val signum : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val floor : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val ceil : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val round : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqr : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqrt : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log2 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log10 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val exp : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum_reduce : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mean : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val swap : int -> int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l1norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm_sqr' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sigmoid : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val relu : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dawsn : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softplus : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softsign : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softmax : ?axis:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val reshape : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val flatten : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_item : t -> int -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_row : t -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val concat : axis:int -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val split : axis:int -> int array -> t -> t array

Refer to :doc:`owl_dense_ndarray_generic`

val of_arrays : t array array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val to_arrays : t -> t array array

Refer to :doc:`owl_dense_ndarray_generic`

val concatenate : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val stack : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_slice : int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_slice : int list list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_fancy : Owl_types.index list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_fancy : Owl_types.index list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diag : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diagm : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val trace : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val triu : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tril : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/NN/index.html b/owl-base/Owl_algodiff_generic/Make/NN/index.html deleted file mode 100644 index 6e4a3a37b..000000000 --- a/owl-base/Owl_algodiff_generic/Make/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl-base.Owl_algodiff_generic.Make.NN)

Module Make.NN

val dropout : ?rate:float -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val upsampling2d : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pad : ?v:A.elt -> int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/argument-1-A/Linalg/index.html b/owl-base/Owl_algodiff_generic/Make/argument-1-A/Linalg/index.html deleted file mode 100644 index 8fae43576..000000000 --- a/owl-base/Owl_algodiff_generic/Make/argument-1-A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_algodiff_generic.Make.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/argument-1-A/Mat/index.html b/owl-base/Owl_algodiff_generic/Make/argument-1-A/Mat/index.html deleted file mode 100644 index d8c2c71b5..000000000 --- a/owl-base/Owl_algodiff_generic/Make/argument-1-A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_algodiff_generic.Make.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/argument-1-A/Scalar/index.html b/owl-base/Owl_algodiff_generic/Make/argument-1-A/Scalar/index.html deleted file mode 100644 index a5560ac64..000000000 --- a/owl-base/Owl_algodiff_generic/Make/argument-1-A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_algodiff_generic.Make.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/argument-1-A/index.html b/owl-base/Owl_algodiff_generic/Make/argument-1-A/index.html deleted file mode 100644 index 346256bcf..000000000 --- a/owl-base/Owl_algodiff_generic/Make/argument-1-A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_algodiff_generic.Make.A)

Parameter Make.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic/Make/index.html b/owl-base/Owl_algodiff_generic/Make/index.html deleted file mode 100644 index 81397d5a7..000000000 --- a/owl-base/Owl_algodiff_generic/Make/index.html +++ /dev/null @@ -1,14 +0,0 @@ - -Make (owl-base.Owl_algodiff_generic.Make)

Module Owl_algodiff_generic.Make

Parameters

Signature

include Owl_algodiff_core_sig.Sig with type A.arr = A.arr with type A.elt = A.elt
module A : Owl_types_ndarray_algodiff.Sig with type arr = A.arr with type elt = A.elt
Type definition
include Owl_algodiff_types_sig.Sig with type elt := A.elt and type arr := A.arr
type t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
Core functions
val tag : unit -> int

TODO

val primal : t -> t

TODO

val primal' : t -> t

TODO

val zero : t -> t

TODO

val reset_zero : t -> t

TODO

val tangent : t -> t

TODO

val adjref : t -> t Stdlib.ref

TODO

val adjval : t -> t

TODO

val shape : t -> int array

TODO

val is_float : t -> bool

TODO

val is_arr : t -> bool

TODO

val row_num : t -> int

number of rows

val col_num : t -> int

number of columns

val numel : t -> int

number of elements

val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t

other functions, without tracking gradient

val clip_by_l2norm : A.elt -> t -> t

other functions, without tracking gradient

val copy_primal' : t -> t

TODO

val tile : t -> int array -> t

TODO

val repeat : t -> int array -> t

TODO

val pack_elt : A.elt -> t

convert from elt type to t type.

val unpack_elt : t -> A.elt

convert from t type to elt type.

val pack_flt : float -> t

convert from float type to t type.

val _f : float -> t

A shortcut function for F A.(float_to_elt x).

val unpack_flt : t -> float

convert from t type to float type.

val pack_arr : A.arr -> t

convert from arr type to t type.

val unpack_arr : t -> A.arr

convert from t type to arr type.
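For example (a small sketch, assuming an instantiation such as module AD = Owl_algodiff_generic.Make (Owl_algodiff_primal_ops.S)):

open AD

let s = _f 2.5                           (* float -> t, same as pack_flt 2.5 *)
let x = unpack_flt s                     (* back to a plain float: 2.5 *)
let a = pack_arr (A.ones [| 2; 2 |])     (* wrap a primal ndarray as t *)
let raw = unpack_arr a                   (* and recover it again *)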

val deep_info : t -> string

TODO

val type_info : t -> string

TODO

val error_binop : string -> t -> t -> 'a

TODO

val error_uniop : string -> t -> 'a

TODO

val make_forward : t -> t -> int -> t

TODO

val make_reverse : t -> int -> t

TODO

val reverse_prop : t -> t -> unit

TODO

val diff : (t -> t) -> t -> t

diff f x returns the exact derivative of a function f : scalar -> scalar at point x. Simply calling diff f will return its derivative function g of the same type, i.e. g : scalar -> scalar.

Calling this function repeatedly gives you higher-order derivatives of f, i.e. f |> diff |> diff |> diff |> ...

val diff' : (t -> t) -> t -> t * t

similar to diff, but return (f x, diff f x).
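For instance (a minimal sketch, not part of the original documentation, assuming module AD = Owl_algodiff_generic.Make (Owl_algodiff_primal_ops.S)):

open AD

let f x = Maths.(sin x * exp (neg x))          (* f : scalar -> scalar *)

let dfdx   = diff f (_f 1.)                    (* f'(1) *)
let d2fdx2 = (f |> diff |> diff) (_f 1.)       (* f''(1), higher order by repeated diff *)

let () =
  Printf.printf "f'(1) = %g, f''(1) = %g\n" (unpack_flt dfdx) (unpack_flt d2fdx2)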

val grad : (t -> t) -> t -> t

gradient of f : (vector -> scalar) at x, reverse ad.

val grad' : (t -> t) -> t -> t * t

similar to grad, but return (f x, grad f x).
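A short sketch of grad on a row-vector input (same assumed AD module as above):

open AD

let g x = Maths.(sum' (sqr x))      (* g(x) = sum_i x_i^2, vector -> scalar *)
let x = Mat.ones 1 3                (* row vector of ones *)
let gx = grad g x                   (* gradient 2 x *)
let gy, gx' = grad' g x             (* (g x, grad g x) in one pass *)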

val jacobian : (t -> t) -> t -> t

jacobian of f : (vector -> vector) at x, both x and y are row vectors.

val jacobian' : (t -> t) -> t -> t * t

similar to jacobian, but return (f x, jacobian f x)

val jacobianv : (t -> t) -> t -> t -> t

jacobian vector product of f : (vector -> vector) at x along v, forward ad. Namely, it calculates (jacobian f x) v.

val jacobianv' : (t -> t) -> t -> t -> t * t

similar to jacobianv, but return (f x, jacobianv f x v).

val jacobianTv : (t -> t) -> t -> t -> t

transposed jacobian vector product of f : (vector -> vector) at x along v, backward ad. Namely, it calculates transpose (jacobian f x) multiplied by v.

val jacobianTv' : (t -> t) -> t -> t -> t * t

similar to jacobianTv, but return (f x, jacobianTv f x v).
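A hedged sketch of the Jacobian operators on a vector -> vector function, using the row-vector convention stated above (same assumed AD module):

open AD

let h x = Maths.(concat ~axis:1 (sqr x) (sin x))    (* R^3 -> R^6, row vectors *)
let x = Mat.uniform 1 3

let j   = jacobian h x                     (* full Jacobian of h at x *)
let jv  = jacobianv  h x (Mat.ones 1 3)    (* Jacobian-vector product, forward ad *)
let jtv = jacobianTv h x (Mat.ones 1 6)    (* transposed Jacobian-vector product, reverse ad *)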

val hessian : (t -> t) -> t -> t

hessian of f : (vector -> scalar) at x.

val hessian' : (t -> t) -> t -> t * t

similar to hessian, but return (f x, hessian f x).

val hessianv : (t -> t) -> t -> t -> t

hessian vector product of f : (vector -> scalar) at x along v. Namely, it calculates (hessian f x) v.

val hessianv' : (t -> t) -> t -> t -> t * t

similar to hessianv, but return (f x, hessianv f x v).

val laplacian : (t -> t) -> t -> t

laplacian of f : (vector -> scalar) at x.

val laplacian' : (t -> t) -> t -> t * t

similar to laplacian, but return (f x, laplacian f x).

val gradhessian : (t -> t) -> t -> t * t

return (grad f x, hessian f x), f : (vector -> scalar)

val gradhessian' : (t -> t) -> t -> t * t * t

return (f x, grad f x, hessian f x)

val gradhessianv : (t -> t) -> t -> t -> t * t

return (grad f x v, hessian f x v)

val gradhessianv' : (t -> t) -> t -> t -> t * t * t

return (f x, grad f x v, hessian f x v)
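A brief sketch of the second-order operators on a vector -> scalar function (same assumed AD module):

open AD

let f x = Maths.(sum' (x * x * x))     (* f(x) = sum_i x_i^3 *)
let x = Mat.ones 1 3

let h     = hessian f x                (* 3 x 3 Hessian matrix *)
let l     = laplacian f x              (* trace of the Hessian *)
let g, hm = gradhessian f x            (* (grad f x, hessian f x) in one call *)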

include Owl_algodiff_ops_sig.Sig with type t := t and type elt := A.elt and type arr := A.arr and type op := op
module Builder : Owl_algodiff_ops_builder_sig.Sig with type t := t and type elt := A.elt and type arr := A.arr and type op := op
Supported Maths functions
module Maths : sig ... end
Supported Linalg functions
module Linalg : sig ... end
Supported Neural Network functions
module NN : sig ... end
Supported Mat functions
module Mat : sig ... end
Supported Arr functions
module Arr : sig ... end
Helper functions
include Owl_algodiff_graph_convert_sig.Sig with type t := t
val to_trace : t list -> string

to_trace [t0; t1; ...] outputs the trace of the computation graph on the terminal in a human-readable format.

val to_dot : t list -> string

to_dot [t0; t1; ...] outputs the trace of the computation graph in the dot file format, which you can feed to other tools such as Graphviz for further visualisation.
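For example (a sketch under the same assumed AD module), the graph built by one reverse-mode pass can be dumped as dot source:

open AD

let x = make_reverse (Mat.uniform 1 3) (tag ())   (* tagged reverse-mode input *)
let y = Maths.(sum' (sqr x))                      (* build a small computation graph *)
let () = print_endline (to_dot [ y ])             (* dot output, viewable with Graphviz *)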

val pp_num : Stdlib.Format.formatter -> t -> unit

pp_num t pretty prints the abstract number used in Algodiff.

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic_sig/.dummy b/owl-base/Owl_algodiff_generic_sig/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/A/Linalg/index.html b/owl-base/Owl_algodiff_generic_sig/module-type-Sig/A/Linalg/index.html deleted file mode 100644 index fe3e370c1..000000000 --- a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_algodiff_generic_sig.Sig.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/A/Mat/index.html b/owl-base/Owl_algodiff_generic_sig/module-type-Sig/A/Mat/index.html deleted file mode 100644 index 1b92bb315..000000000 --- a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_algodiff_generic_sig.Sig.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/A/Scalar/index.html b/owl-base/Owl_algodiff_generic_sig/module-type-Sig/A/Scalar/index.html deleted file mode 100644 index b0ad855ba..000000000 --- a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_algodiff_generic_sig.Sig.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/A/index.html b/owl-base/Owl_algodiff_generic_sig/module-type-Sig/A/index.html deleted file mode 100644 index d96dab17f..000000000 --- a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_algodiff_generic_sig.Sig.A)

Module Sig.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Arr/index.html b/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Arr/index.html deleted file mode 100644 index e6b24aa15..000000000 --- a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl-base.Owl_algodiff_generic_sig.Sig.Arr)

Module Sig.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/index.html b/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/index.html deleted file mode 100644 index a298de65e..000000000 --- a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl-base.Owl_algodiff_generic_sig.Sig.Builder)

Module Sig.Builder

Ops Builder
module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t

build single input single output operations

module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t

build single input pair outputs operations

module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t

build single input triple outputs operations

module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array

build single input array output operations

module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t

build pair inputs single output operations

module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t

build array input single output operations

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Aiso/index.html b/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Aiso/index.html deleted file mode 100644 index 66b5f8b3a..000000000 --- a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl-base.Owl_algodiff_generic_sig.Sig.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Piso/index.html b/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Piso/index.html deleted file mode 100644 index 2253039f2..000000000 --- a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl-base.Owl_algodiff_generic_sig.Sig.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Siao/index.html b/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Siao/index.html deleted file mode 100644 index 23a394d9e..000000000 --- a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl-base.Owl_algodiff_generic_sig.Sig.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Sipo/index.html b/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Sipo/index.html deleted file mode 100644 index f0f5d57eb..000000000 --- a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl-base.Owl_algodiff_generic_sig.Sig.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Siso/index.html b/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Siso/index.html deleted file mode 100644 index 0098d6b47..000000000 --- a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl-base.Owl_algodiff_generic_sig.Sig.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Sito/index.html b/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Sito/index.html deleted file mode 100644 index c5fc17dc7..000000000 --- a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl-base.Owl_algodiff_generic_sig.Sig.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Linalg/index.html b/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Linalg/index.html deleted file mode 100644 index 773be344c..000000000 --- a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_algodiff_generic_sig.Sig.Linalg)

Module Sig.Linalg

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val logdet : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val chol : ?upper:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val qr : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val lq : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val svd : ?thin:bool -> t -> t * t * t

Refer to :doc:`owl_dense_ndarray_generic`

val sylvester : t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val lyapunov : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val care : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dare : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Mat/index.html b/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Mat/index.html deleted file mode 100644 index 6e5d83735..000000000 --- a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_algodiff_generic_sig.Sig.Mat)

Module Sig.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Maths/index.html b/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Maths/index.html deleted file mode 100644 index b73e8a67a..000000000 --- a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl-base.Owl_algodiff_generic_sig.Sig.Maths)

Module Sig.Maths

val (+) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (-) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (**) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val add : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sub : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mul : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val div : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val kron : t -> t -> t

Refer to :doc:`owl_dense_matrix_generic`

val dot : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pow : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val min2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cross_entropy : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val neg : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val abs : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val signum : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val floor : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val ceil : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val round : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqr : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqrt : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log2 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log10 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val exp : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum_reduce : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mean : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val swap : int -> int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l1norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm_sqr' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sigmoid : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val relu : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dawsn : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softplus : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softsign : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softmax : ?axis:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val reshape : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val flatten : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_item : t -> int -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_row : t -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val concat : axis:int -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val split : axis:int -> int array -> t -> t array

Refer to :doc:`owl_dense_ndarray_generic`

val of_arrays : t array array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val to_arrays : t -> t array array

Refer to :doc:`owl_dense_ndarray_generic`

val concatenate : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val stack : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_slice : int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_slice : int list list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_fancy : Owl_types.index list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_fancy : Owl_types.index list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diag : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diagm : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val trace : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val triu : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tril : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/NN/index.html b/owl-base/Owl_algodiff_generic_sig/module-type-Sig/NN/index.html deleted file mode 100644 index ed70959a2..000000000 --- a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl-base.Owl_algodiff_generic_sig.Sig.NN)

Module Sig.NN

val dropout : ?rate:float -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv2d : - ?padding:Owl_types.padding -> - t -> - t -> - int array -> - int array -> - t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv3d : - ?padding:Owl_types.padding -> - t -> - t -> - int array -> - int array -> - t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val upsampling2d : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pad : ?v:A.elt -> int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`
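
For orientation only, a minimal sketch of chaining these operations, assuming the double-precision instantiation Owl.Algodiff.D of this signature (the shapes and layout below are illustrative assumptions, not part of the signature):

  open Owl.Algodiff.D

  (* batch of 10 single-channel 28x28 images in NHWC layout *)
  let x = pack_arr (Owl.Arr.uniform [| 10; 28; 28; 1 |])
  (* 3x3 kernel with 1 input channel and 8 output channels *)
  let w = pack_arr (Owl.Arr.uniform [| 3; 3; 1; 8 |])

  let y = NN.conv2d ~padding:Owl_types.SAME x w [| 1; 1 |]
  let y = Maths.relu y
  let y = NN.max_pool2d Owl_types.SAME y [| 2; 2 |] [| 2; 2 |]

Because every step stays in the t type, gradients with respect to x and w can later be obtained with the reverse-mode functions of the enclosing signature.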

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/index.html b/owl-base/Owl_algodiff_generic_sig/module-type-Sig/index.html deleted file mode 100644 index 860ba545a..000000000 --- a/owl-base/Owl_algodiff_generic_sig/module-type-Sig/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Sig (owl-base.Owl_algodiff_generic_sig.Sig)

Module type Owl_algodiff_generic_sig.Sig

include Owl_algodiff_core_sig.Sig
Type definition
include Owl_algodiff_types_sig.Sig with type elt := A.elt and type arr := A.arr
type t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
Core functions
val tag : unit -> int

TODO

val primal : t -> t

TODO

val primal' : t -> t

TODO

val zero : t -> t

TODO

val reset_zero : t -> t

TODO

val tangent : t -> t

TODO

val adjref : t -> t Stdlib.ref

TODO

val adjval : t -> t

TODO

val shape : t -> int array

TODO

val is_float : t -> bool

TODO

val is_arr : t -> bool

TODO

val row_num : t -> int

number of rows

val col_num : t -> int

number of columns

val numel : t -> int

number of elements

val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t

other functions; the gradient is not tracked through this operation

val clip_by_l2norm : A.elt -> t -> t

other functions; the gradient is not tracked through this operation

val copy_primal' : t -> t

TODO

val tile : t -> int array -> t

TODO

val repeat : t -> int array -> t

TODO

val pack_elt : A.elt -> t

convert from elt type to t type.

val unpack_elt : t -> A.elt

convert from t type to elt type.

val pack_flt : float -> t

convert from float type to t type.

val _f : float -> t

A shortcut function for F A.(float_to_elt x).

val unpack_flt : t -> float

convert from t type to float type.

val pack_arr : A.arr -> t

convert from arr type to t type.

val unpack_arr : t -> A.arr

convert from t type to arr type.
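
A brief usage sketch of these conversions, assuming the double-precision instantiation Owl.Algodiff.D (illustrative only):

  open Owl.Algodiff.D

  let x  = pack_arr (Owl.Arr.ones [| 2; 2 |])   (* A.arr -> t *)
  let s  = pack_flt 3.                          (* float -> t, equivalent to _f 3. *)
  let y  = Maths.(x * s)                        (* computation stays in the t type *)
  let y' = unpack_arr y                         (* t -> A.arr, back to a plain ndarray *)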

val deep_info : t -> string

TODO

val type_info : t -> string

TODO

val error_binop : string -> t -> t -> 'a

TODO

val error_uniop : string -> t -> 'a

TODO

val make_forward : t -> t -> int -> t

TODO

val make_reverse : t -> int -> t

TODO

val reverse_prop : t -> t -> unit

TODO

val diff : (t -> t) -> t -> t

diff f x returns the exact derivative of a function f : scalar -> scalar at point x. Simply calling diff f returns its derivative function g of the same type, i.e. g : scalar -> scalar.

Calling this function repeatedly gives you higher-order derivatives of f, i.e. f |> diff |> diff |> diff |> ...
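
As an illustration (not part of this signature), a minimal sketch assuming the double-precision instantiation Owl.Algodiff.D:

  open Owl.Algodiff.D

  (* f : scalar -> scalar *)
  let f x = Maths.(sin x * x)

  let dfdx   = diff f (_f 2.)                (* first derivative at x = 2 *)
  let d2fdx2 = (f |> diff |> diff) (_f 2.)   (* second derivative by composing diff *)

  let () = Printf.printf "f' = %g, f'' = %g\n" (unpack_flt dfdx) (unpack_flt d2fdx2)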

val diff' : (t -> t) -> t -> t * t

similar to diff, but returns (f x, diff f x).

val grad : (t -> t) -> t -> t

gradient of f : (vector -> scalar) at x, reverse ad.

val grad' : (t -> t) -> t -> t * t

similar to grad, but returns (f x, grad f x).
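
A sketch of reverse-mode gradient evaluation, again assuming Owl.Algodiff.D and using a row-vector input (illustrative):

  open Owl.Algodiff.D

  (* f : vector -> scalar *)
  let f x = Maths.(l2norm_sqr' (sin x))

  let x = pack_arr (Owl.Arr.uniform [| 1; 3 |])
  let g = grad f x        (* gradient of f at x, same shape as x *)
  let v, g' = grad' f x   (* value and gradient together *)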

val jacobian : (t -> t) -> t -> t

jacobian of f : (vector -> vector) at x, both x and y are row vectors.

val jacobian' : (t -> t) -> t -> t * t

similar to jacobian, but returns (f x, jacobian f x).
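
For instance, with a small vector-to-vector function (a sketch assuming Owl.Algodiff.D; the shapes are illustrative):

  open Owl.Algodiff.D

  (* f : 1 x 3 row vector -> 1 x 6 row vector *)
  let f x = Maths.(concat ~axis:1 (sin x) (cos x))

  let x = pack_arr (Owl.Arr.uniform [| 1; 3 |])
  let j = jacobian f x    (* the full Jacobian of f at x *)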

val jacobianv : (t -> t) -> t -> t -> t

jacobian vector product of f : (vector -> vector) at x along v, forward ad. Namely, it calculates (jacobian f x) v.

val jacobianv' : (t -> t) -> t -> t -> t * t

similar to jacobianv, but returns (f x, jacobianv f x v).

val jacobianTv : (t -> t) -> t -> t -> t

transposed jacobian vector product of f : (vector -> vector) at x along v, backward ad. Namely, it calculates (transpose (jacobian f x)) v.

val jacobianTv' : (t -> t) -> t -> t -> t * t

similar to jacobianTv, but returns (f x, jacobianTv f x v).
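
The two directional variants avoid building the full Jacobian. A sketch, assuming Owl.Algodiff.D:

  open Owl.Algodiff.D

  let f x = Maths.(sin x)                       (* elementwise, vector -> vector *)
  let x = pack_arr (Owl.Arr.uniform [| 1; 4 |])
  let v = pack_arr (Owl.Arr.ones    [| 1; 4 |])

  let jv  = jacobianv  f x v    (* forward mode: (jacobian f x) v     *)
  let jtv = jacobianTv f x v    (* reverse mode: (jacobian f x)^T v   *)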

val hessian : (t -> t) -> t -> t

hessian of f : (scalar -> scalar) at x.

val hessian' : (t -> t) -> t -> t * t

similar to hessian, but returns (f x, hessian f x).

val hessianv : (t -> t) -> t -> t -> t

hessian vector product of f : (scalar -> scalar) at x along v. Namely, it calculates (hessian f x) v.

val hessianv' : (t -> t) -> t -> t -> t * t

similar to hessianv, but returns (f x, hessianv f x v).
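
A sketch of the second-order functions, assuming Owl.Algodiff.D and treating f as a function of a row-vector input, as in the grad example above:

  open Owl.Algodiff.D

  let f x = Maths.(l2norm_sqr' x)               (* a simple quadratic *)
  let x = pack_arr (Owl.Arr.uniform [| 1; 3 |])
  let v = pack_arr (Owl.Arr.ones    [| 1; 3 |])

  let h  = hessian  f x      (* full Hessian of f at x *)
  let hv = hessianv f x v    (* Hessian-vector product, without forming h *)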

val laplacian : (t -> t) -> t -> t

laplacian of f : (scalar -> scalar) at x.

val laplacian' : (t -> t) -> t -> t * t

similar to laplacian, but returns (f x, laplacian f x).

val gradhessian : (t -> t) -> t -> t * t

returns (grad f x, hessian f x), for f : (scalar -> scalar).

val gradhessian' : (t -> t) -> t -> t * t * t

returns (f x, grad f x, hessian f x).

val gradhessianv : (t -> t) -> t -> t -> t * t

returns (grad f x v, hessian f x v).

val gradhessianv' : (t -> t) -> t -> t -> t * t * t

returns (f x, grad f x v, hessian f x v).

include Owl_algodiff_ops_sig.Sig - with type t := t - and type elt := A.elt - and type arr := A.arr - and type op := op
module Builder : - Owl_algodiff_ops_builder_sig.Sig - with type t := t - and type elt := A.elt - and type arr := A.arr - and type op := op
Supported Maths functions
module Maths : sig ... end
Supported Linalg functions
module Linalg : sig ... end
Supported Neural Network functions
module NN : sig ... end
Supported Mat functions
module Mat : sig ... end
Supported Arr functions
module Arr : sig ... end
Helper functions
include Owl_algodiff_graph_convert_sig.Sig with type t := t
val to_trace : t list -> string

to_trace [t0; t1; ...] returns the trace of the computation graph as a human-readable string, suitable for printing on the terminal.

val to_dot : t list -> string

to_dot [t0; t1; ...] returns the trace of the computation graph in the dot file format, which can be rendered for further visualisation with external tools such as Graphviz.
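
A sketch of exporting such a graph, assuming Owl.Algodiff.D; the file name is arbitrary:

  open Owl.Algodiff.D

  let x = make_reverse (pack_arr (Owl.Arr.uniform [| 1; 3 |])) (tag ())
  let y = Maths.(l2norm_sqr' (sin x))

  (* write the graph rooted at y in Graphviz dot format *)
  let () =
    let oc = open_out "graph.dot" in
    output_string oc (to_dot [ y ]);
    close_out oc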

val pp_num : Stdlib.Format.formatter -> t -> unit

pp_num t pretty prints the abstract number used in Algodiff.

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_graph_convert/.dummy b/owl-base/Owl_algodiff_graph_convert/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/A/Linalg/index.html b/owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/A/Linalg/index.html deleted file mode 100644 index ed429f461..000000000 --- a/owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_algodiff_graph_convert.Make.Core.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : - ?solver:[ `default | `bilinear | `direct ] -> - arr -> - arr -> - arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/A/Mat/index.html b/owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/A/Mat/index.html deleted file mode 100644 index f94c292da..000000000 --- a/owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_algodiff_graph_convert.Make.Core.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/A/Scalar/index.html b/owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/A/Scalar/index.html deleted file mode 100644 index f99213a05..000000000 --- a/owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_algodiff_graph_convert.Make.Core.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/A/index.html b/owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/A/index.html deleted file mode 100644 index 9e5e67f69..000000000 --- a/owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_algodiff_graph_convert.Make.Core.A)

Module Core.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(elt -> string) -> - arr -> - unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val dilated_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val transpose_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val max_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv1d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/index.html b/owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/index.html deleted file mode 100644 index e7441c68f..000000000 --- a/owl-base/Owl_algodiff_graph_convert/Make/argument-1-Core/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Core (owl-base.Owl_algodiff_graph_convert.Make.Core)

Parameter Make.Core

Type definition
include Owl_algodiff_types_sig.Sig with type elt := A.elt and type arr := A.arr
type t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
Core functions
val tag : unit -> int

TODO

val primal : t -> t

TODO

val primal' : t -> t

TODO

val zero : t -> t

TODO

val reset_zero : t -> t

TODO

val tangent : t -> t

TODO

val adjref : t -> t Stdlib.ref

TODO

val adjval : t -> t

TODO

val shape : t -> int array

TODO

val is_float : t -> bool

TODO

val is_arr : t -> bool

TODO

val row_num : t -> int

number of rows

val col_num : t -> int

number of columns

val numel : t -> int

number of elements

val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t

other functions; the gradient is not tracked through this operation

val clip_by_l2norm : A.elt -> t -> t

other functions; the gradient is not tracked through this operation

val copy_primal' : t -> t

TODO

val tile : t -> int array -> t

TODO

val repeat : t -> int array -> t

TODO

val pack_elt : A.elt -> t

convert from elt type to t type.

val unpack_elt : t -> A.elt

convert from t type to elt type.

val pack_flt : float -> t

convert from float type to t type.

val _f : float -> t

A shortcut function for F A.(float_to_elt x).

val unpack_flt : t -> float

convert from t type to float type.

val pack_arr : A.arr -> t

convert from arr type to t type.

val unpack_arr : t -> A.arr

convert from t type to arr type.

val deep_info : t -> string

TODO

val type_info : t -> string

TODO

val error_binop : string -> t -> t -> 'a

TODO

val error_uniop : string -> t -> 'a

TODO

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_graph_convert/Make/index.html b/owl-base/Owl_algodiff_graph_convert/Make/index.html deleted file mode 100644 index a64faa2bd..000000000 --- a/owl-base/Owl_algodiff_graph_convert/Make/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Make (owl-base.Owl_algodiff_graph_convert.Make)

Module Owl_algodiff_graph_convert.Make

Parameters

Signature

val to_trace : Core.t list -> string

to_trace [t0; t1; ...] returns the trace of the computation graph as a human-readable string, suitable for printing on the terminal.

val to_dot : Core.t list -> string

to_dot [t0; t1; ...] returns the trace of the computation graph in the dot file format, which can be rendered for further visualisation with external tools such as Graphviz.

val pp_num : Stdlib.Format.formatter -> Core.t -> unit

pp_num t pretty prints the abstract number used in Algodiff.

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_graph_convert_sig/.dummy b/owl-base/Owl_algodiff_graph_convert_sig/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_algodiff_graph_convert_sig/module-type-Sig/index.html b/owl-base/Owl_algodiff_graph_convert_sig/module-type-Sig/index.html deleted file mode 100644 index fec25de5a..000000000 --- a/owl-base/Owl_algodiff_graph_convert_sig/module-type-Sig/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Sig (owl-base.Owl_algodiff_graph_convert_sig.Sig)

Module type Owl_algodiff_graph_convert_sig.Sig

type t
val to_trace : t list -> string

to_trace [t0; t1; ...] returns the trace of the computation graph as a human-readable string, suitable for printing on the terminal.

val to_dot : t list -> string

to_dot [t0; t1; ...] returns the trace of the computation graph in the dot file format, which can be rendered for further visualisation with external tools such as Graphviz.

val pp_num : Stdlib.Format.formatter -> t -> unit

pp_num t pretty prints the abstract number used in Algodiff.

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops/.dummy b/owl-base/Owl_algodiff_ops/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_algodiff_ops/Make/Arr/index.html b/owl-base/Owl_algodiff_ops/Make/Arr/index.html deleted file mode 100644 index 3b775a973..000000000 --- a/owl-base/Owl_algodiff_ops/Make/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl-base.Owl_algodiff_ops.Make.Arr)

Module Make.Arr

val empty : int array -> Core.t
val zeros : int array -> Core.t
val ones : int array -> Core.t
val uniform : ?a:Core.A.elt -> ?b:Core.A.elt -> int array -> Core.t
val gaussian : ?mu:Core.A.elt -> ?sigma:Core.A.elt -> int array -> Core.t
val shape : Core.t -> int array
val numel : Core.t -> int
val reset : Core.t -> unit
val reshape : Core.t -> int array -> Core.t
val add : Core.t -> Core.t -> Core.t
val sub : Core.t -> Core.t -> Core.t
val mul : Core.t -> Core.t -> Core.t
val div : Core.t -> Core.t -> Core.t
val dot : Core.t -> Core.t -> Core.t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops/Make/Builder/index.html b/owl-base/Owl_algodiff_ops/Make/Builder/index.html deleted file mode 100644 index 33be8bb68..000000000 --- a/owl-base/Owl_algodiff_ops/Make/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl-base.Owl_algodiff_ops.Make.Builder)

Module Make.Builder

Ops Builder
module type Siso = sig ... end
val build_siso : (module Siso) -> Core.t -> Core.t

build single input single output operations

module type Sipo = sig ... end
val build_sipo : (module Sipo) -> Core.t -> Core.t * Core.t

build single input pair outputs operations

module type Sito = sig ... end
val build_sito : (module Sito) -> Core.t -> Core.t * Core.t * Core.t

build single input triple outputs operations

module type Siao = sig ... end
val build_siao : (module Siao) -> Core.t -> Core.t array

build single input array output operations

module type Piso = sig ... end
val build_piso : (module Piso) -> Core.t -> Core.t -> Core.t

build pair inputs single output operations

module type Aiso = sig ... end
val build_aiso : (module Aiso) -> Core.t array -> Core.t

build array input single output operations
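
As a rough sketch of how these builders are meant to be used, assuming the instantiation Owl.Algodiff.D, whose Builder module has this signature; the op below, its name, and its derivative rules are hypothetical and written against the Siso signature shown further down:

  open Owl.Algodiff.D

  (* a custom elementwise op f(x) = x^3 with hand-written derivative rules *)
  let cube =
    let module C = struct
      let label = "cube"
      let ff_f a = pack_elt A.Scalar.(mul a (mul a a))   (* forward on a scalar   *)
      let ff_arr a = pack_arr A.(mul a (mul a a))        (* forward on an ndarray *)
      let df _cp ap at = Maths.(_f 3. * sqr ap * at)     (* tangent rule          *)
      let dr a _cp ca = Maths.(_f 3. * sqr a * !ca)      (* adjoint rule          *)
    end
    in
    Builder.build_siso (module C : Builder.Siso)

  (* cube : t -> t can now be used like any other Maths function *)
  let y = cube (pack_arr (Owl.Arr.uniform [| 2; 2 |]))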

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops/Make/Builder/module-type-Aiso/index.html b/owl-base/Owl_algodiff_ops/Make/Builder/module-type-Aiso/index.html deleted file mode 100644 index 6069df4de..000000000 --- a/owl-base/Owl_algodiff_ops/Make/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl-base.Owl_algodiff_ops.Make.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : Core.t array -> Core.t
val df : int list -> Core.t -> Core.t array -> Core.t array -> Core.t
val dr : int list -> Core.t array -> Core.t -> Core.t Stdlib.ref -> Core.t list
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops/Make/Builder/module-type-Piso/index.html b/owl-base/Owl_algodiff_ops/Make/Builder/module-type-Piso/index.html deleted file mode 100644 index 420fae291..000000000 --- a/owl-base/Owl_algodiff_ops/Make/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl-base.Owl_algodiff_ops.Make.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : Core.A.elt -> Core.A.elt -> Core.t
val ff_ab : Core.A.elt -> Core.A.arr -> Core.t
val ff_ba : Core.A.arr -> Core.A.elt -> Core.t
val ff_bb : Core.A.arr -> Core.A.arr -> Core.t
val df_da : Core.t -> Core.t -> Core.t -> Core.t -> Core.t
val df_db : Core.t -> Core.t -> Core.t -> Core.t -> Core.t
val df_dab : Core.t -> Core.t -> Core.t -> Core.t -> Core.t -> Core.t
val dr_ab : Core.t -> Core.t -> Core.t -> Core.t Stdlib.ref -> Core.t * Core.t
val dr_a : Core.t -> Core.t -> Core.t -> Core.t Stdlib.ref -> Core.t
val dr_b : Core.t -> Core.t -> Core.t -> Core.t Stdlib.ref -> Core.t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops/Make/Builder/module-type-Siao/index.html b/owl-base/Owl_algodiff_ops/Make/Builder/module-type-Siao/index.html deleted file mode 100644 index 2538033d0..000000000 --- a/owl-base/Owl_algodiff_ops/Make/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Siao (owl-base.Owl_algodiff_ops.Make.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : Core.A.elt -> Core.t array
val ff_arr : Core.A.arr -> Core.t array
val df : Core.t array -> Core.t -> Core.t -> Core.t array
val dr : - Core.t -> - Core.t -> - Core.t Stdlib.ref array -> - Core.t Stdlib.ref array -> - Core.t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops/Make/Builder/module-type-Sipo/index.html b/owl-base/Owl_algodiff_ops/Make/Builder/module-type-Sipo/index.html deleted file mode 100644 index 5c1e4f836..000000000 --- a/owl-base/Owl_algodiff_ops/Make/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl-base.Owl_algodiff_ops.Make.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : Core.A.elt -> Core.t * Core.t
val ff_arr : Core.A.arr -> Core.t * Core.t
val df : Core.t -> Core.t -> Core.t -> Core.t
val dr : - Core.t -> - Core.t -> - (Core.t Stdlib.ref * Core.t Stdlib.ref) -> - (Core.t Stdlib.ref * Core.t Stdlib.ref) -> - Core.t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops/Make/Builder/module-type-Siso/index.html b/owl-base/Owl_algodiff_ops/Make/Builder/module-type-Siso/index.html deleted file mode 100644 index a0f877a7d..000000000 --- a/owl-base/Owl_algodiff_ops/Make/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl-base.Owl_algodiff_ops.Make.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : Core.A.elt -> Core.t
val ff_arr : Core.A.arr -> Core.t
val df : Core.t -> Core.t -> Core.t -> Core.t
val dr : Core.t -> Core.t -> Core.t Stdlib.ref -> Core.t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops/Make/Builder/module-type-Sito/index.html b/owl-base/Owl_algodiff_ops/Make/Builder/module-type-Sito/index.html deleted file mode 100644 index 17039bec4..000000000 --- a/owl-base/Owl_algodiff_ops/Make/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl-base.Owl_algodiff_ops.Make.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : Core.A.elt -> Core.t * Core.t * Core.t
val ff_arr : Core.A.arr -> Core.t * Core.t * Core.t
val df : Core.t -> Core.t -> Core.t -> Core.t
val dr : - Core.t -> - Core.t -> - (Core.t Stdlib.ref * Core.t Stdlib.ref * Core.t Stdlib.ref) -> - (Core.t Stdlib.ref * Core.t Stdlib.ref * Core.t Stdlib.ref) -> - Core.t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops/Make/Linalg/index.html b/owl-base/Owl_algodiff_ops/Make/Linalg/index.html deleted file mode 100644 index 290ea4263..000000000 --- a/owl-base/Owl_algodiff_ops/Make/Linalg/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Linalg (owl-base.Owl_algodiff_ops.Make.Linalg)

Module Make.Linalg

val inv : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val logdet : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val chol : ?upper:bool -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val qr : Core.t -> Core.t * Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val lq : Core.t -> Core.t * Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val svd : ?thin:bool -> Core.t -> Core.t * Core.t * Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val sylvester : Core.t -> Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val lyapunov : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val discrete_lyapunov : - ?solver:[ `default | `bilinear | `direct ] -> - Core.t -> - Core.t -> - Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val (/@) : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val linsolve : - ?trans:bool -> - ?typ:[ `n | `u | `l ] -> - Core.t -> - Core.t -> - Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val care : ?diag_r:bool -> Core.t -> Core.t -> Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val dare : ?diag_r:bool -> Core.t -> Core.t -> Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops/Make/Mat/index.html b/owl-base/Owl_algodiff_ops/Make/Mat/index.html deleted file mode 100644 index e6e21be4a..000000000 --- a/owl-base/Owl_algodiff_ops/Make/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_algodiff_ops.Make.Mat)

Module Make.Mat

val empty : int -> int -> Core.t
val zeros : int -> int -> Core.t
val eye : int -> Core.t
val ones : int -> int -> Core.t
val uniform : ?a:Core.A.elt -> ?b:Core.A.elt -> int -> int -> Core.t
val gaussian : ?mu:Core.A.elt -> ?sigma:Core.A.elt -> int -> int -> Core.t
val shape : Core.t -> int * int
val numel : Core.t -> int
val row_num : Core.t -> int
val col_num : Core.t -> int
val reset : Core.t -> unit
val reshape : int -> int -> Core.t -> Core.t
val get : Core.t -> int -> int -> Core.t
val set : Core.t -> int -> int -> Core.t -> Core.t
val row : Core.t -> int -> Core.t
val mean : Core.t -> Core.t
val add : Core.t -> Core.t -> Core.t
val sub : Core.t -> Core.t -> Core.t
val mul : Core.t -> Core.t -> Core.t
val div : Core.t -> Core.t -> Core.t
val dot : Core.t -> Core.t -> Core.t
val map_by_row : (Core.t -> Core.t) -> Core.t -> Core.t
val of_arrays : Core.A.elt array array -> Core.t
val init_2d : int -> int -> (int -> int -> Core.t) -> Core.t
val print : Core.t -> unit
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops/Make/Maths/index.html b/owl-base/Owl_algodiff_ops/Make/Maths/index.html deleted file mode 100644 index 2cbc79de6..000000000 --- a/owl-base/Owl_algodiff_ops/Make/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl-base.Owl_algodiff_ops.Make.Maths)

Module Make.Maths

val (+) : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val (-) : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val (*) : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val (/) : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val (*@) : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val (**) : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`
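
These operators follow the usual Owl conventions; a usage sketch assuming the instantiation Owl.Algodiff.D (all values below are illustrative):

  open Owl.Algodiff.D

  let a = Mat.uniform 3 3
  let b = Mat.uniform 3 3

  (* elementwise arithmetic, matrix product and scalar broadcasting in one expression *)
  let c = Maths.((a *@ b) + (a * b) / _f 2.)
  let d = Maths.(c ** _f 2.)   (* elementwise power *)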

val add : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val sub : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val mul : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val div : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val kron : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_matrix_generic`

val dot : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val pow : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val atan2 : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val min2 : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val max2 : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val cross_entropy : Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val inv : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val neg : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val abs : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val signum : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val floor : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val ceil : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val round : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val sqr : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val sqrt : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val log : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val log2 : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val log10 : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val exp : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val sin : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val cos : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val tan : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val sinh : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val cosh : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val tanh : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val asin : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val acos : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val atan : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val asinh : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val acosh : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val atanh : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val sum' : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp' : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val sum : ?axis:int -> ?keep_dims:bool -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val sum_reduce : ?axis:int array -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val mean : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose : ?axis:int array -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val swap : int -> int -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val l1norm' : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm' : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm_sqr' : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val sigmoid : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val relu : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val dawsn : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val softplus : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val softsign : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val softmax : ?axis:int -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val reshape : Core.t -> int array -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val flatten : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val get_item : Core.t -> int -> int -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val get_row : Core.t -> int -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val concat : axis:int -> Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val split : axis:int -> int array -> Core.t -> Core.t array

Refer to :doc:`owl_dense_ndarray_generic`

val of_arrays : Core.t array array -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val to_arrays : Core.t -> Core.t array array

Refer to :doc:`owl_dense_ndarray_generic`

val concatenate : axis:int -> Core.t array -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val stack : axis:int -> Core.t array -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val get_slice : int list list -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val set_slice : int list list -> Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val get_fancy : Owl_types.index list -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val set_fancy : Owl_types.index list -> Core.t -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val diag : ?k:int -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val diagm : ?k:int -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val trace : Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val triu : ?k:int -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val tril : ?k:int -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops/Make/NN/index.html b/owl-base/Owl_algodiff_ops/Make/NN/index.html deleted file mode 100644 index 477a9124e..000000000 --- a/owl-base/Owl_algodiff_ops/Make/NN/index.html +++ /dev/null @@ -1,80 +0,0 @@ - -NN (owl-base.Owl_algodiff_ops.Make.NN)

Module Make.NN

val dropout : ?rate:float -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val conv1d : - ?padding:Owl_types.padding -> - Core.t -> - Core.t -> - int array -> - Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val conv2d : - ?padding:Owl_types.padding -> - Core.t -> - Core.t -> - int array -> - Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val conv3d : - ?padding:Owl_types.padding -> - Core.t -> - Core.t -> - int array -> - Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv1d : - ?padding:Owl_types.padding -> - Core.t -> - Core.t -> - int array -> - int array -> - Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv2d : - ?padding:Owl_types.padding -> - Core.t -> - Core.t -> - int array -> - int array -> - Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv3d : - ?padding:Owl_types.padding -> - Core.t -> - Core.t -> - int array -> - int array -> - Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv1d : - ?padding:Owl_types.padding -> - Core.t -> - Core.t -> - int array -> - Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv2d : - ?padding:Owl_types.padding -> - Core.t -> - Core.t -> - int array -> - Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv3d : - ?padding:Owl_types.padding -> - Core.t -> - Core.t -> - int array -> - Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool1d : - Owl_types.padding -> - Core.t -> - int array -> - int array -> - Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool2d : - Owl_types.padding -> - Core.t -> - int array -> - int array -> - Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool3d : - Owl_types.padding -> - Core.t -> - int array -> - int array -> - Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool1d : - Owl_types.padding -> - Core.t -> - int array -> - int array -> - Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool2d : - Owl_types.padding -> - Core.t -> - int array -> - int array -> - Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool3d : - Owl_types.padding -> - Core.t -> - int array -> - int array -> - Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val upsampling2d : Core.t -> int array -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

val pad : ?v:Core.A.elt -> int list list -> Core.t -> Core.t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops/Make/argument-1-Core/A/Linalg/index.html b/owl-base/Owl_algodiff_ops/Make/argument-1-Core/A/Linalg/index.html deleted file mode 100644 index 243ce6dff..000000000 --- a/owl-base/Owl_algodiff_ops/Make/argument-1-Core/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_algodiff_ops.Make.Core.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : - ?solver:[ `default | `bilinear | `direct ] -> - arr -> - arr -> - arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops/Make/argument-1-Core/A/Mat/index.html b/owl-base/Owl_algodiff_ops/Make/argument-1-Core/A/Mat/index.html deleted file mode 100644 index 633c56ac3..000000000 --- a/owl-base/Owl_algodiff_ops/Make/argument-1-Core/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_algodiff_ops.Make.Core.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops/Make/argument-1-Core/A/Scalar/index.html b/owl-base/Owl_algodiff_ops/Make/argument-1-Core/A/Scalar/index.html deleted file mode 100644 index aef76547e..000000000 --- a/owl-base/Owl_algodiff_ops/Make/argument-1-Core/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_algodiff_ops.Make.Core.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops/Make/argument-1-Core/A/index.html b/owl-base/Owl_algodiff_ops/Make/argument-1-Core/A/index.html deleted file mode 100644 index 01504a03d..000000000 --- a/owl-base/Owl_algodiff_ops/Make/argument-1-Core/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_algodiff_ops.Make.Core.A)

Module Core.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(elt -> string) -> - arr -> - unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val dilated_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val transpose_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val max_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv1d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops/Make/argument-1-Core/index.html b/owl-base/Owl_algodiff_ops/Make/argument-1-Core/index.html deleted file mode 100644 index f8af8c431..000000000 --- a/owl-base/Owl_algodiff_ops/Make/argument-1-Core/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Core (owl-base.Owl_algodiff_ops.Make.Core)

Parameter Make.Core

Type definition
include Owl_algodiff_types_sig.Sig with type elt := A.elt and type arr := A.arr
type t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
Core functions
val tag : unit -> int

TODO

val primal : t -> t

TODO

val primal' : t -> t

TODO

val zero : t -> t

TODO

val reset_zero : t -> t

TODO

val tangent : t -> t

TODO

val adjref : t -> t Stdlib.ref

TODO

val adjval : t -> t

TODO

val shape : t -> int array

TODO

val is_float : t -> bool

TODO

val is_arr : t -> bool

TODO

val row_num : t -> int

number of rows

val col_num : t -> int

number of columns

val numel : t -> int

number of elements

val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t

other functions; the gradient is not tracked through this operation

val clip_by_l2norm : A.elt -> t -> t

other functions; the gradient is not tracked through this operation

val copy_primal' : t -> t

TODO

val tile : t -> int array -> t

TODO

val repeat : t -> int array -> t

TODO

val pack_elt : A.elt -> t

convert from elt type to t type.

val unpack_elt : t -> A.elt

convert from t type to elt type.

val pack_flt : float -> t

convert from float type to t type.

val _f : float -> t

A shortcut function for F A.(float_to_elt x).

val unpack_flt : t -> float

convert from t type to float type.

val pack_arr : A.arr -> t

convert from arr type to t type.

val unpack_arr : t -> A.arr

convert from t type to arr type.

val deep_info : t -> string

TODO

val type_info : t -> string

TODO

val error_binop : string -> t -> t -> 'a

TODO

val error_uniop : string -> t -> 'a

TODO

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops/Make/index.html b/owl-base/Owl_algodiff_ops/Make/index.html deleted file mode 100644 index a8b6151a8..000000000 --- a/owl-base/Owl_algodiff_ops/Make/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Make (owl-base.Owl_algodiff_ops.Make)

Module Owl_algodiff_ops.Make

Parameters

Signature

module Builder : - Owl_algodiff_ops_builder_sig.Sig - with type t := Core.t - and type elt := Core.A.elt - and type arr := Core.A.arr - and type op := Core.op
Supported Maths functions
module Maths : sig ... end
Supported Linalg functions
module Linalg : sig ... end
Supported Neural Network functions
module NN : sig ... end
Supported Mat functions
module Mat : sig ... end
Supported Arr functions
module Arr : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder/.dummy b/owl-base/Owl_algodiff_ops_builder/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/A/Linalg/index.html b/owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/A/Linalg/index.html deleted file mode 100644 index c472c8161..000000000 --- a/owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_algodiff_ops_builder.Make.Core.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/A/Mat/index.html b/owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/A/Mat/index.html deleted file mode 100644 index d53bc47c8..000000000 --- a/owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_algodiff_ops_builder.Make.Core.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/A/Scalar/index.html b/owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/A/Scalar/index.html deleted file mode 100644 index e4b215085..000000000 --- a/owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_algodiff_ops_builder.Make.Core.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/A/index.html b/owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/A/index.html deleted file mode 100644 index e57604a56..000000000 --- a/owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_algodiff_ops_builder.Make.Core.A)

Module Core.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/index.html b/owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/index.html deleted file mode 100644 index fc9f71265..000000000 --- a/owl-base/Owl_algodiff_ops_builder/Make/argument-1-Core/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Core (owl-base.Owl_algodiff_ops_builder.Make.Core)

Parameter Make.Core

Type definition
include Owl_algodiff_types_sig.Sig with type elt := A.elt and type arr := A.arr
type t =
  | F of A.elt
  | Arr of A.arr
  | DF of t * t * int
  | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
Core functions
val tag : unit -> int

TODO

val primal : t -> t

TODO

val primal' : t -> t

TODO

val zero : t -> t

TODO

val reset_zero : t -> t

TODO

val tangent : t -> t

TODO

val adjref : t -> t Stdlib.ref

TODO

val adjval : t -> t

TODO

val shape : t -> int array

TODO

val is_float : t -> bool

TODO

val is_arr : t -> bool

TODO

val row_num : t -> int

number of rows

val col_num : t -> int

number of columns

val numel : t -> int

number of elements

val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t

other functions, without tracking gradient

val clip_by_l2norm : A.elt -> t -> t

other functions, without tracking gradient

val copy_primal' : t -> t

TODO

val tile : t -> int array -> t

TODO

val repeat : t -> int array -> t

TODO

val pack_elt : A.elt -> t

convert from elt type to t type.

val unpack_elt : t -> A.elt

convert from t type to elt type.

val pack_flt : float -> t

convert from float type to t type.

val _f : float -> t

A shortcut function for F A.(float_to_elt x).

val unpack_flt : t -> float

convert from t type to float type.

val pack_arr : A.arr -> t

convert from arr type to t type.

val unpack_arr : t -> A.arr

convert from t type to arr type.

val deep_info : t -> string

TODO

val type_info : t -> string

TODO

val error_binop : string -> t -> t -> 'a

TODO

val error_uniop : string -> t -> 'a

TODO

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder/Make/index.html b/owl-base/Owl_algodiff_ops_builder/Make/index.html deleted file mode 100644 index c46d5f474..000000000 --- a/owl-base/Owl_algodiff_ops_builder/Make/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Make (owl-base.Owl_algodiff_ops_builder.Make)

Module Owl_algodiff_ops_builder.Make

Parameters

Signature

module type Siso = sig ... end
val build_siso : (module Siso) -> Core.t -> Core.t

build single input single output operations

module type Sipo = sig ... end
val build_sipo : (module Sipo) -> Core.t -> Core.t * Core.t

build single input pair outputs operations

module type Sito = sig ... end
val build_sito : (module Sito) -> Core.t -> Core.t * Core.t * Core.t

build single input triple outputs operations

module type Siao = sig ... end
val build_siao : (module Siao) -> Core.t -> Core.t array

build single input array output operations

module type Piso = sig ... end
val build_piso : (module Piso) -> Core.t -> Core.t -> Core.t

build pair inputs single output operations

module type Aiso = sig ... end
val build_aiso : (module Aiso) -> Core.t array -> Core.t

build array input single output operations
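
As an illustration of the builder pattern, here is a hedged sketch of a custom single-input single-output operation registered with build_siso; the module name Sin_op is made up, and the use of Core.A primitives in ff_* and of an in-scope Maths module in df/dr are assumptions rather than part of this interface:

module Sin_op = struct
  let label = "custom_sin"
  let ff_f a = Core.pack_elt (Core.A.Scalar.sin a)   (* forward pass on a scalar *)
  let ff_arr a = Core.pack_arr (Core.A.sin a)        (* forward pass on an ndarray *)
  (* df cp ap at: output tangent given the input primal ap and input tangent at *)
  let df _cp ap at = Maths.(at * cos ap)
  (* dr a cp ca: contribution pushed back onto the input's adjoint *)
  let dr a _cp ca = Maths.(!ca * cos a)
end

let custom_sin = build_siso (module Sin_op : Siso)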

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder/Make/module-type-Aiso/index.html b/owl-base/Owl_algodiff_ops_builder/Make/module-type-Aiso/index.html deleted file mode 100644 index 654b4d7a2..000000000 --- a/owl-base/Owl_algodiff_ops_builder/Make/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl-base.Owl_algodiff_ops_builder.Make.Aiso)

Module type Make.Aiso

val label : string
val ff : Core.t array -> Core.t
val df : int list -> Core.t -> Core.t array -> Core.t array -> Core.t
val dr : int list -> Core.t array -> Core.t -> Core.t Stdlib.ref -> Core.t list
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder/Make/module-type-Piso/index.html b/owl-base/Owl_algodiff_ops_builder/Make/module-type-Piso/index.html deleted file mode 100644 index 4de2b4af7..000000000 --- a/owl-base/Owl_algodiff_ops_builder/Make/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl-base.Owl_algodiff_ops_builder.Make.Piso)

Module type Make.Piso

val label : string
val ff_aa : Core.A.elt -> Core.A.elt -> Core.t
val ff_ab : Core.A.elt -> Core.A.arr -> Core.t
val ff_ba : Core.A.arr -> Core.A.elt -> Core.t
val ff_bb : Core.A.arr -> Core.A.arr -> Core.t
val df_da : Core.t -> Core.t -> Core.t -> Core.t -> Core.t
val df_db : Core.t -> Core.t -> Core.t -> Core.t -> Core.t
val df_dab : Core.t -> Core.t -> Core.t -> Core.t -> Core.t -> Core.t
val dr_ab : Core.t -> Core.t -> Core.t -> Core.t Stdlib.ref -> Core.t * Core.t
val dr_a : Core.t -> Core.t -> Core.t -> Core.t Stdlib.ref -> Core.t
val dr_b : Core.t -> Core.t -> Core.t -> Core.t Stdlib.ref -> Core.t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder/Make/module-type-Siao/index.html b/owl-base/Owl_algodiff_ops_builder/Make/module-type-Siao/index.html deleted file mode 100644 index 2542af41c..000000000 --- a/owl-base/Owl_algodiff_ops_builder/Make/module-type-Siao/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Siao (owl-base.Owl_algodiff_ops_builder.Make.Siao)

Module type Make.Siao

val label : string
val ff_f : Core.A.elt -> Core.t array
val ff_arr : Core.A.arr -> Core.t array
val df : Core.t array -> Core.t -> Core.t -> Core.t array
val dr : Core.t -> Core.t -> Core.t Stdlib.ref array -> Core.t Stdlib.ref array -> Core.t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder/Make/module-type-Sipo/index.html b/owl-base/Owl_algodiff_ops_builder/Make/module-type-Sipo/index.html deleted file mode 100644 index 84c9e9c65..000000000 --- a/owl-base/Owl_algodiff_ops_builder/Make/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl-base.Owl_algodiff_ops_builder.Make.Sipo)

Module type Make.Sipo

val label : string
val ff_f : Core.A.elt -> Core.t * Core.t
val ff_arr : Core.A.arr -> Core.t * Core.t
val df : Core.t -> Core.t -> Core.t -> Core.t
val dr : Core.t -> Core.t -> (Core.t Stdlib.ref * Core.t Stdlib.ref) -> (Core.t Stdlib.ref * Core.t Stdlib.ref) -> Core.t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder/Make/module-type-Siso/index.html b/owl-base/Owl_algodiff_ops_builder/Make/module-type-Siso/index.html deleted file mode 100644 index cd617a8a8..000000000 --- a/owl-base/Owl_algodiff_ops_builder/Make/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl-base.Owl_algodiff_ops_builder.Make.Siso)

Module type Make.Siso

val label : string
val ff_f : Core.A.elt -> Core.t
val ff_arr : Core.A.arr -> Core.t
val df : Core.t -> Core.t -> Core.t -> Core.t
val dr : Core.t -> Core.t -> Core.t Stdlib.ref -> Core.t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder/Make/module-type-Sito/index.html b/owl-base/Owl_algodiff_ops_builder/Make/module-type-Sito/index.html deleted file mode 100644 index 1f1aaf321..000000000 --- a/owl-base/Owl_algodiff_ops_builder/Make/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl-base.Owl_algodiff_ops_builder.Make.Sito)

Module type Make.Sito

val label : string
val ff_f : Core.A.elt -> Core.t * Core.t * Core.t
val ff_arr : Core.A.arr -> Core.t * Core.t * Core.t
val df : Core.t -> Core.t -> Core.t -> Core.t
val dr : Core.t -> Core.t -> (Core.t Stdlib.ref * Core.t Stdlib.ref * Core.t Stdlib.ref) -> (Core.t Stdlib.ref * Core.t Stdlib.ref * Core.t Stdlib.ref) -> Core.t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder_sig/.dummy b/owl-base/Owl_algodiff_ops_builder_sig/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/index.html b/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/index.html deleted file mode 100644 index c86ba1e4f..000000000 --- a/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Sig (owl-base.Owl_algodiff_ops_builder_sig.Sig)

Module type Owl_algodiff_ops_builder_sig.Sig

type elt
type arr
type t
type op
module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t

build single input single output operations

module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t

build single input pair outputs operations

module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t

build single input triple outputs operations

module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array

build single input array output operations

module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t

build pair inputs single output operations

module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t

build array input single output operations

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Aiso/index.html b/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Aiso/index.html deleted file mode 100644 index 9a1853374..000000000 --- a/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl-base.Owl_algodiff_ops_builder_sig.Sig.Aiso)

Module type Sig.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Piso/index.html b/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Piso/index.html deleted file mode 100644 index 696b9a46c..000000000 --- a/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl-base.Owl_algodiff_ops_builder_sig.Sig.Piso)

Module type Sig.Piso

val label : string
val ff_aa : elt -> elt -> t
val ff_ab : elt -> arr -> t
val ff_ba : arr -> elt -> t
val ff_bb : arr -> arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Siao/index.html b/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Siao/index.html deleted file mode 100644 index 71d744da9..000000000 --- a/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl-base.Owl_algodiff_ops_builder_sig.Sig.Siao)

Module type Sig.Siao

val label : string
val ff_f : elt -> t array
val ff_arr : arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Sipo/index.html b/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Sipo/index.html deleted file mode 100644 index 8b7ae738b..000000000 --- a/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl-base.Owl_algodiff_ops_builder_sig.Sig.Sipo)

Module type Sig.Sipo

val label : string
val ff_f : elt -> t * t
val ff_arr : arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Siso/index.html b/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Siso/index.html deleted file mode 100644 index 1a948c70b..000000000 --- a/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl-base.Owl_algodiff_ops_builder_sig.Sig.Siso)

Module type Sig.Siso

val label : string
val ff_f : elt -> t
val ff_arr : arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Sito/index.html b/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Sito/index.html deleted file mode 100644 index 7fd6244a5..000000000 --- a/owl-base/Owl_algodiff_ops_builder_sig/module-type-Sig/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl-base.Owl_algodiff_ops_builder_sig.Sig.Sito)

Module type Sig.Sito

val label : string
val ff_f : elt -> t * t * t
val ff_arr : arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_sig/.dummy b/owl-base/Owl_algodiff_ops_sig/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Arr/index.html b/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Arr/index.html deleted file mode 100644 index b6de398e7..000000000 --- a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl-base.Owl_algodiff_ops_sig.Sig.Arr)

Module Sig.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:elt -> ?b:elt -> int array -> t
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
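
A small sketch of this Arr module in use once the signature is instantiated; every value has the AD type t, so results can flow straight into differentiated code:

let x = Arr.uniform [| 3; 4 |]
let w = Arr.ones [| 4; 2 |]
let y = Arr.dot x w            (* a 3x2 result, still an AD value *)
let s = Arr.shape y            (* [| 3; 2 |] *)
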
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/index.html b/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/index.html deleted file mode 100644 index 4eab5a465..000000000 --- a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl-base.Owl_algodiff_ops_sig.Sig.Builder)

Module Sig.Builder

Ops Builder
module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t

build single input single output operations

module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t

build single input pair outputs operations

module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t

build single input triple outputs operations

module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array

build single input array output operations

module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t

build pair inputs single output operations

module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t

build array input single output operations

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Aiso/index.html b/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Aiso/index.html deleted file mode 100644 index e08b12803..000000000 --- a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl-base.Owl_algodiff_ops_sig.Sig.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Piso/index.html b/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Piso/index.html deleted file mode 100644 index 956c2e545..000000000 --- a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl-base.Owl_algodiff_ops_sig.Sig.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : elt -> elt -> t
val ff_ab : elt -> arr -> t
val ff_ba : arr -> elt -> t
val ff_bb : arr -> arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Siao/index.html b/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Siao/index.html deleted file mode 100644 index 3be40e65a..000000000 --- a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl-base.Owl_algodiff_ops_sig.Sig.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : elt -> t array
val ff_arr : arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Sipo/index.html b/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Sipo/index.html deleted file mode 100644 index a052c08cb..000000000 --- a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl-base.Owl_algodiff_ops_sig.Sig.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : elt -> t * t
val ff_arr : arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Siso/index.html b/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Siso/index.html deleted file mode 100644 index 13478544b..000000000 --- a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl-base.Owl_algodiff_ops_sig.Sig.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : elt -> t
val ff_arr : arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Sito/index.html b/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Sito/index.html deleted file mode 100644 index cfd337c03..000000000 --- a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl-base.Owl_algodiff_ops_sig.Sig.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : elt -> t * t * t
val ff_arr : arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Linalg/index.html b/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Linalg/index.html deleted file mode 100644 index 506f15972..000000000 --- a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_algodiff_ops_sig.Sig.Linalg)

Module Sig.Linalg

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val logdet : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val chol : ?upper:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val qr : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val lq : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val svd : ?thin:bool -> t -> t * t * t

Refer to :doc:`owl_dense_ndarray_generic`

val sylvester : t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val lyapunov : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val care : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dare : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`
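
These wrappers are the AD-aware counterparts of the dense linear-algebra routines, so gradients can flow through factorisations and solves. A brief sketch, where a and b are assumed to be AD matrices of compatible shapes:

let q, r = Linalg.qr a                  (* QR factorisation; q and r are AD values *)
let x    = Linalg.linsolve a b          (* solve a * x = b *)
let l    = Linalg.chol ~upper:false a   (* lower-triangular Cholesky factor *)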

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Mat/index.html b/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Mat/index.html deleted file mode 100644 index 41e82a2d2..000000000 --- a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_algodiff_ops_sig.Sig.Mat)

Module Sig.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:elt -> ?b:elt -> int -> int -> t
val gaussian : ?mu:elt -> ?sigma:elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
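
A brief sketch of the Mat helpers; note that get returns an AD scalar of type t rather than a raw elt, so individual entries can participate in differentiated expressions:

let m = Mat.gaussian 3 3
let i = Mat.eye 3
let p = Mat.dot m i
let e = Mat.get p 0 0          (* an AD scalar, usable directly inside Maths *)
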
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Maths/index.html b/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Maths/index.html deleted file mode 100644 index 72020a75c..000000000 --- a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl-base.Owl_algodiff_ops_sig.Sig.Maths)

Module Sig.Maths

val (+) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (-) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (**) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val add : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sub : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mul : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val div : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val kron : t -> t -> t

Refer to :doc:`owl_dense_matrix_generic`

val dot : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pow : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val min2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cross_entropy : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val neg : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val abs : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val signum : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val floor : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val ceil : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val round : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqr : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqrt : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log2 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log10 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val exp : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum_reduce : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mean : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val swap : int -> int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l1norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm_sqr' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sigmoid : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val relu : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dawsn : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softplus : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softsign : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softmax : ?axis:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val reshape : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val flatten : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_item : t -> int -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_row : t -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val concat : axis:int -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val split : axis:int -> int array -> t -> t array

Refer to :doc:`owl_dense_ndarray_generic`

val of_arrays : t array array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val to_arrays : t -> t array array

Refer to :doc:`owl_dense_ndarray_generic`

val concatenate : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val stack : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_slice : int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_slice : int list list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_fancy : Owl_types.index list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_fancy : Owl_types.index list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diag : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diagm : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val trace : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val triu : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tril : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`
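
Because every function above takes and returns the AD type t, whole expressions can be written with these operators and differentiated afterwards. A minimal sketch, where x, w, b and target are assumed AD values of compatible shapes:

let y = Maths.(tanh ((x *@ w) + b))          (* affine map followed by tanh *)
let l = Maths.(l2norm_sqr' (y - target))     (* scalar loss, still of type t *)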

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/NN/index.html b/owl-base/Owl_algodiff_ops_sig/module-type-Sig/NN/index.html deleted file mode 100644 index bca834a79..000000000 --- a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl-base.Owl_algodiff_ops_sig.Sig.NN)

Module Sig.NN

val dropout : ?rate:float -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val upsampling2d : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pad : ?v:elt -> int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`
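
A hedged sketch of chaining these primitives; following Owl's usual convention the input x is assumed to be laid out as [|batch; column; row; channel|] and the kernel k as [|w; h; in_channel; out_channel|], with strides passed as int arrays:

let y = NN.conv2d ~padding:Owl_types.SAME x k [| 1; 1 |]
let y = NN.max_pool2d Owl_types.SAME y [| 2; 2 |] [| 2; 2 |]
let y = NN.dropout ~rate:0.1 y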

\ No newline at end of file diff --git a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/index.html b/owl-base/Owl_algodiff_ops_sig/module-type-Sig/index.html deleted file mode 100644 index bcb9a3316..000000000 --- a/owl-base/Owl_algodiff_ops_sig/module-type-Sig/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sig (owl-base.Owl_algodiff_ops_sig.Sig)

Module type Owl_algodiff_ops_sig.Sig

type t
type elt
type arr
type op
module Builder : Owl_algodiff_ops_builder_sig.Sig with type t := t and type elt := elt and type arr := arr and type op := op
Supported Maths functions
module Maths : sig ... end
Supported Linalg functions
module Linalg : sig ... end
Supported Neural Network functions
module NN : sig ... end
Supported Mat functions
module Mat : sig ... end
Supported Arr functions
module Arr : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_reverse/.dummy b/owl-base/Owl_algodiff_reverse/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_algodiff_reverse/Make/argument-1-C/A/Linalg/index.html b/owl-base/Owl_algodiff_reverse/Make/argument-1-C/A/Linalg/index.html deleted file mode 100644 index 9d8e800fe..000000000 --- a/owl-base/Owl_algodiff_reverse/Make/argument-1-C/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_algodiff_reverse.Make.C.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_reverse/Make/argument-1-C/A/Mat/index.html b/owl-base/Owl_algodiff_reverse/Make/argument-1-C/A/Mat/index.html deleted file mode 100644 index f973aeff4..000000000 --- a/owl-base/Owl_algodiff_reverse/Make/argument-1-C/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_algodiff_reverse.Make.C.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_reverse/Make/argument-1-C/A/Scalar/index.html b/owl-base/Owl_algodiff_reverse/Make/argument-1-C/A/Scalar/index.html deleted file mode 100644 index f7cc9e74a..000000000 --- a/owl-base/Owl_algodiff_reverse/Make/argument-1-C/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_algodiff_reverse.Make.C.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_reverse/Make/argument-1-C/A/index.html b/owl-base/Owl_algodiff_reverse/Make/argument-1-C/A/index.html deleted file mode 100644 index 2fa03c47b..000000000 --- a/owl-base/Owl_algodiff_reverse/Make/argument-1-C/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_algodiff_reverse.Make.C.A)

Module C.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_reverse/Make/argument-1-C/index.html b/owl-base/Owl_algodiff_reverse/Make/argument-1-C/index.html deleted file mode 100644 index c3bba61fc..000000000 --- a/owl-base/Owl_algodiff_reverse/Make/argument-1-C/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -C (owl-base.Owl_algodiff_reverse.Make.C)

Parameter Make.C

include Owl_algodiff_core_sig.Sig
Type definition
include Owl_algodiff_types_sig.Sig with type elt := A.elt and type arr := A.arr
type t =
  | F of A.elt
  | Arr of A.arr
  | DF of t * t * int
  | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
Core functions
val tag : unit -> int

TODO

val primal : t -> t

TODO

val primal' : t -> t

TODO

val zero : t -> t

TODO

val reset_zero : t -> t

TODO

val tangent : t -> t

TODO

val adjref : t -> t Stdlib.ref

TODO

val adjval : t -> t

TODO

val shape : t -> int array

TODO

val is_float : t -> bool

TODO

val is_arr : t -> bool

TODO

val row_num : t -> int

number of rows

val col_num : t -> int

number of columns

val numel : t -> int

number of elements

val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t

other functions, without tracking gradient

val clip_by_l2norm : A.elt -> t -> t

other functions, without tracking gradient

val copy_primal' : t -> t

TODO

val tile : t -> int array -> t

TODO

val repeat : t -> int array -> t

TODO

val pack_elt : A.elt -> t

convert from elt type to t type.

val unpack_elt : t -> A.elt

convert from t type to elt type.

val pack_flt : float -> t

convert from float type to t type.

val _f : float -> t

A shortcut function for F A.(float_to_elt x).

val unpack_flt : t -> float

convert from t type to float type.

val pack_arr : A.arr -> t

convert from arr type to t type.

val unpack_arr : t -> A.arr

convert from t type to arr type.

val deep_info : t -> string

TODO

val type_info : t -> string

TODO

val error_binop : string -> t -> t -> 'a

TODO

val error_uniop : string -> t -> 'a

TODO

val reverse_add : t -> t -> t
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_reverse/Make/index.html b/owl-base/Owl_algodiff_reverse/Make/index.html deleted file mode 100644 index 4250a83e5..000000000 --- a/owl-base/Owl_algodiff_reverse/Make/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Make (owl-base.Owl_algodiff_reverse.Make)

Module Owl_algodiff_reverse.Make

Parameters

module C : sig ... end

Signature

val reverse_push : C.t -> C.t -> unit
val reverse_prop : C.t -> C.t -> unit
val reverse_reset : C.t -> unit
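
A rough sketch of driving a reverse pass with these functions, where y is the output of a tracked computation and x one of its inputs; the exact calling convention (adjoint seed first, node second) is an assumption based on typical usage rather than something fixed by this signature:

let () = reverse_reset y                     (* clear stale adjoints in the graph below y *)
let () = reverse_push (C.pack_flt 1.) y      (* seed the output adjoint with 1. and push it back *)
let g  = C.adjval x                          (* read the accumulated adjoint (gradient) at x *)
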
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_types/.dummy b/owl-base/Owl_algodiff_types/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_algodiff_types/Make/argument-1-A/Linalg/index.html b/owl-base/Owl_algodiff_types/Make/argument-1-A/Linalg/index.html deleted file mode 100644 index 72be466c8..000000000 --- a/owl-base/Owl_algodiff_types/Make/argument-1-A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_algodiff_types.Make.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_types/Make/argument-1-A/Mat/index.html b/owl-base/Owl_algodiff_types/Make/argument-1-A/Mat/index.html deleted file mode 100644 index 660ebbbb8..000000000 --- a/owl-base/Owl_algodiff_types/Make/argument-1-A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_algodiff_types.Make.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_types/Make/argument-1-A/Scalar/index.html b/owl-base/Owl_algodiff_types/Make/argument-1-A/Scalar/index.html deleted file mode 100644 index 54ce3f4e8..000000000 --- a/owl-base/Owl_algodiff_types/Make/argument-1-A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_algodiff_types.Make.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_types/Make/argument-1-A/index.html b/owl-base/Owl_algodiff_types/Make/argument-1-A/index.html deleted file mode 100644 index 86631d66c..000000000 --- a/owl-base/Owl_algodiff_types/Make/argument-1-A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_algodiff_types.Make.A)

Parameter Make.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_types/Make/index.html b/owl-base/Owl_algodiff_types/Make/index.html deleted file mode 100644 index d3b082d1d..000000000 --- a/owl-base/Owl_algodiff_types/Make/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Make (owl-base.Owl_algodiff_types.Make)

Module Owl_algodiff_types.Make

Parameters

module A : sig ... end

Signature

type t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
\ No newline at end of file diff --git a/owl-base/Owl_algodiff_types_sig/.dummy b/owl-base/Owl_algodiff_types_sig/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_algodiff_types_sig/module-type-Sig/index.html b/owl-base/Owl_algodiff_types_sig/module-type-Sig/index.html deleted file mode 100644 index a7ad4bc28..000000000 --- a/owl-base/Owl_algodiff_types_sig/module-type-Sig/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Sig (owl-base.Owl_algodiff_types_sig.Sig)

Module type Owl_algodiff_types_sig.Sig

type elt
type arr
type t =
  1. | F of elt
  2. | Arr of arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
\ No newline at end of file diff --git a/owl-base/Owl_base/.dummy b/owl-base/Owl_base/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_base_algodiff_primal_ops/.dummy b/owl-base/Owl_base_algodiff_primal_ops/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_base_algodiff_primal_ops/D/Linalg/index.html b/owl-base/Owl_base_algodiff_primal_ops/D/Linalg/index.html deleted file mode 100644 index f33d33d96..000000000 --- a/owl-base/Owl_base_algodiff_primal_ops/D/Linalg/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Linalg (owl-base.Owl_base_algodiff_primal_ops.D.Linalg)

Module D.Linalg

include module type of struct include Owl_base_linalg_d end
type elt = float
type complex_mat = Owl_base_dense_matrix_z.mat
type int32_mat = (int32, Stdlib.Bigarray.int32_elt) Owl_base_dense_matrix_generic.t
include Owl_base_linalg_intf.Common with type elt := elt and type mat := mat and type complex_mat := complex_mat and type int32_mat := int32_mat
Basic functions
val inv : mat -> mat
val det : mat -> elt
val logdet : mat -> elt
val is_triu : mat -> bool
val is_tril : mat -> bool
val is_symmetric : mat -> bool
val is_diag : mat -> bool
Factorisation
val svd : ?thin:bool -> mat -> mat * mat * mat
val chol : ?upper:bool -> mat -> mat
Linear system of equations
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> mat -> mat -> mat
val sylvester : mat -> mat -> mat -> mat
val lyapunov : mat -> mat -> mat
val discrete_lyapunov : ?solver:[ `default | `direct | `bilinear ] -> mat -> mat -> mat
include Owl_base_linalg_intf.Real with type elt := elt and type mat := mat
val care : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
val dare : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
val qr : mat -> mat * mat
val lq : mat -> mat * mat
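A minimal sketch using this module; it assumes, as elsewhere in owl-base, that mat coincides with the double-precision 2-D arrays produced by the enclosing D module:

module M = Owl_base_algodiff_primal_ops.D

let a = M.Mat.eye 3                        (* 3 x 3 identity *)
let b = M.ones [| 3; 1 |]
let x = M.Linalg.linsolve a b              (* solves a * x = b *)
let u, s, vt = M.Linalg.svd ~thin:true a   (* thin SVD of a *)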
\ No newline at end of file diff --git a/owl-base/Owl_base_algodiff_primal_ops/D/Mat/index.html b/owl-base/Owl_base_algodiff_primal_ops/D/Mat/index.html deleted file mode 100644 index 44f15500c..000000000 --- a/owl-base/Owl_base_algodiff_primal_ops/D/Mat/index.html +++ /dev/null @@ -1,13 +0,0 @@ - -Mat (owl-base.Owl_base_algodiff_primal_ops.D.Mat)

Module D.Mat

val eye : int -> (float, Stdlib.Bigarray.float64_elt) Owl_base_dense_matrix_d.M.t
val tril : ?k:int -> ('a, 'b) Owl_base_dense_matrix_d.t -> ('a, 'b) Owl_base_dense_matrix_d.t
val triu : ?k:int -> ('a, 'b) Owl_base_dense_matrix_d.t -> ('a, 'b) Owl_base_dense_matrix_d.t
val diagm : ?k:int -> ('a, 'b) Owl_base_dense_matrix_d.t -> ('a, 'b) Owl_base_dense_matrix_d.t
\ No newline at end of file diff --git a/owl-base/Owl_base_algodiff_primal_ops/D/index.html b/owl-base/Owl_base_algodiff_primal_ops/D/index.html deleted file mode 100644 index 2902b8840..000000000 --- a/owl-base/Owl_base_algodiff_primal_ops/D/index.html +++ /dev/null @@ -1,164 +0,0 @@ - -D (owl-base.Owl_base_algodiff_primal_ops.D)

Module Owl_base_algodiff_primal_ops.D

include module type of struct include Owl_base_dense_ndarray_d end
type elt = float
type arr = (float, Stdlib.Bigarray.float64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
include Owl_base_dense_ndarray_intf.Common with type arr := arr and type elt := elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:float -> int array -> arr
val shape : arr -> int array
val numel : arr -> int
val strides : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val slice_size : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val flatten : arr -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val squeeze : ?axis:int array -> arr -> arr
val expand : ?hi:bool -> arr -> int -> arr
val split : ?axis:int -> int array -> arr -> arr array
val draw : ?axis:int -> arr -> int -> arr * int array
val pad : ?v:elt -> int list list -> arr -> arr
val one_hot : int -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
Iterate array elements
val iteri : (int -> elt -> unit) -> arr -> unit
val iter : (elt -> unit) -> arr -> unit
val mapi : (int -> elt -> elt) -> arr -> arr
val map : (elt -> elt) -> arr -> arr
val filteri : (int -> elt -> bool) -> arr -> int array
val filter : (elt -> bool) -> arr -> int array
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
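A short sketch of the iteration helpers above, using creation functions from the same signature (elt is float in this module):

module M = Owl_base_algodiff_primal_ops.D

let a = M.sequential ~a:0. ~step:1. [| 2; 3 |]    (* 0. 1. 2. / 3. 4. 5. *)
let b = M.mapi (fun i x -> x +. float_of_int i) a
let () = M.iter (fun x -> Printf.printf "%g " x) b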
Examination & Comparison
val exists : (elt -> bool) -> arr -> bool
val not_exists : (elt -> bool) -> arr -> bool
val for_all : (elt -> bool) -> arr -> bool
val is_zero : arr -> bool
val is_positive : arr -> bool
val is_negative : arr -> bool
val is_nonpositive : arr -> bool
val is_nonnegative : arr -> bool
val is_normal : arr -> bool
val not_nan : arr -> bool
val not_inf : arr -> bool
val equal : arr -> arr -> bool
val not_equal : arr -> arr -> bool
val greater : arr -> arr -> bool
val less : arr -> arr -> bool
val greater_equal : arr -> arr -> bool
val less_equal : arr -> arr -> bool
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val equal_scalar : arr -> elt -> bool
val not_equal_scalar : arr -> elt -> bool
val less_scalar : arr -> elt -> bool
val greater_scalar : arr -> elt -> bool
val less_equal_scalar : arr -> elt -> bool
val greater_equal_scalar : arr -> elt -> bool
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
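For example, a sketch contrasting the bool-returning and elementwise comparisons above (M abbreviates Owl_base_algodiff_primal_ops.D):

module M = Owl_base_algodiff_primal_ops.D

let a = M.zeros [| 2; 2 |]
let all_zero = M.is_zero a                   (* true *)
let any_neg = M.exists (fun x -> x < 0.) a   (* false *)
let mask = M.elt_greater_scalar a (-1.)      (* elementwise 0./1. mask *)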
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val diag : ?k:int -> arr -> arr
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
include Owl_base_dense_ndarray_intf.Real with type arr := arr and type elt := elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_slices : ?axis:int -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val approx_equal : ?eps:float -> arr -> arr -> bool
val approx_equal_scalar : ?eps:float -> arr -> float -> bool
val approx_elt_equal : ?eps:float -> arr -> arr -> arr
val approx_elt_equal_scalar : ?eps:float -> arr -> float -> arr
val dot : arr -> arr -> arr
val trace : arr -> elt
Helper functions
val float_to_elt : float -> elt
val elt_to_float : elt -> float
include Owl_base_dense_ndarray_intf.NN with type arr := arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
module Scalar = Owl_base_maths
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_base_algodiff_primal_ops/S/Linalg/index.html b/owl-base/Owl_base_algodiff_primal_ops/S/Linalg/index.html deleted file mode 100644 index 12230b400..000000000 --- a/owl-base/Owl_base_algodiff_primal_ops/S/Linalg/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Linalg (owl-base.Owl_base_algodiff_primal_ops.S.Linalg)

Module S.Linalg

include module type of struct include Owl_base_linalg_s end
type elt = float
type complex_mat = Owl_base_dense_matrix_c.mat
type int32_mat = (int32, Stdlib.Bigarray.int32_elt) Owl_base_dense_matrix_generic.t
include Owl_base_linalg_intf.Common with type elt := elt and type mat := mat and type complex_mat := complex_mat and type int32_mat := int32_mat
Basic functions
val inv : mat -> mat
val det : mat -> elt
val logdet : mat -> elt
val is_triu : mat -> bool
val is_tril : mat -> bool
val is_symmetric : mat -> bool
val is_diag : mat -> bool
Factorisation
val svd : ?thin:bool -> mat -> mat * mat * mat
val chol : ?upper:bool -> mat -> mat
Linear system of equations
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> mat -> mat -> mat
val sylvester : mat -> mat -> mat -> mat
val lyapunov : mat -> mat -> mat
val discrete_lyapunov : ?solver:[ `default | `direct | `bilinear ] -> mat -> mat -> mat
include Owl_base_linalg_intf.Real with type elt := elt and type mat := mat
val care : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
val dare : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
val qr : mat -> mat * mat
val lq : mat -> mat * mat
\ No newline at end of file diff --git a/owl-base/Owl_base_algodiff_primal_ops/S/Mat/index.html b/owl-base/Owl_base_algodiff_primal_ops/S/Mat/index.html deleted file mode 100644 index 747c4f9c5..000000000 --- a/owl-base/Owl_base_algodiff_primal_ops/S/Mat/index.html +++ /dev/null @@ -1,13 +0,0 @@ - -Mat (owl-base.Owl_base_algodiff_primal_ops.S.Mat)

Module S.Mat

val eye : int -> (float, Stdlib.Bigarray.float32_elt) Owl_base_dense_matrix_s.M.t
val tril : ?k:int -> ('a, 'b) Owl_base_dense_matrix_s.t -> ('a, 'b) Owl_base_dense_matrix_s.t
val triu : ?k:int -> ('a, 'b) Owl_base_dense_matrix_s.t -> ('a, 'b) Owl_base_dense_matrix_s.t
val diagm : ?k:int -> ('a, 'b) Owl_base_dense_matrix_s.t -> ('a, 'b) Owl_base_dense_matrix_s.t
\ No newline at end of file diff --git a/owl-base/Owl_base_algodiff_primal_ops/S/index.html b/owl-base/Owl_base_algodiff_primal_ops/S/index.html deleted file mode 100644 index 0c8654204..000000000 --- a/owl-base/Owl_base_algodiff_primal_ops/S/index.html +++ /dev/null @@ -1,225 +0,0 @@ - -S (owl-base.Owl_base_algodiff_primal_ops.S)

Module Owl_base_algodiff_primal_ops.S

include module type of struct include Owl_base_dense_ndarray.S end
include module type of struct include Owl_base_dense_ndarray_s end
type elt = float
type arr = (float, Stdlib.Bigarray.float32_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
include Owl_base_dense_ndarray_intf.Common with type arr := arr and type elt := elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:float -> int array -> arr
val shape : arr -> int array
val numel : arr -> int
val strides : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val slice_size : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val flatten : arr -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val squeeze : ?axis:int array -> arr -> arr
val expand : ?hi:bool -> arr -> int -> arr
val split : ?axis:int -> int array -> arr -> arr array
val draw : ?axis:int -> arr -> int -> arr * int array
val pad : ?v:elt -> int list list -> arr -> arr
val one_hot : int -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
Iterate array elements
val iteri : (int -> elt -> unit) -> arr -> unit
val iter : (elt -> unit) -> arr -> unit
val mapi : (int -> elt -> elt) -> arr -> arr
val map : (elt -> elt) -> arr -> arr
val filteri : (int -> elt -> bool) -> arr -> int array
val filter : (elt -> bool) -> arr -> int array
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
Examination & Comparison
val exists : (elt -> bool) -> arr -> bool
val not_exists : (elt -> bool) -> arr -> bool
val for_all : (elt -> bool) -> arr -> bool
val is_zero : arr -> bool
val is_positive : arr -> bool
val is_negative : arr -> bool
val is_nonpositive : arr -> bool
val is_nonnegative : arr -> bool
val is_normal : arr -> bool
val not_nan : arr -> bool
val not_inf : arr -> bool
val equal : arr -> arr -> bool
val not_equal : arr -> arr -> bool
val greater : arr -> arr -> bool
val less : arr -> arr -> bool
val greater_equal : arr -> arr -> bool
val less_equal : arr -> arr -> bool
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val equal_scalar : arr -> elt -> bool
val not_equal_scalar : arr -> elt -> bool
val less_scalar : arr -> elt -> bool
val greater_scalar : arr -> elt -> bool
val less_equal_scalar : arr -> elt -> bool
val greater_equal_scalar : arr -> elt -> bool
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val diag : ?k:int -> arr -> arr
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
include Owl_base_dense_ndarray_intf.Real with type arr := arr and type elt := elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_slices : ?axis:int -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val approx_equal : ?eps:float -> arr -> arr -> bool
val approx_equal_scalar : ?eps:float -> arr -> float -> bool
val approx_elt_equal : ?eps:float -> arr -> arr -> arr
val approx_elt_equal_scalar : ?eps:float -> arr -> float -> arr
val dot : arr -> arr -> arr
val trace : arr -> elt
Helper functions
val float_to_elt : float -> elt
val elt_to_float : elt -> float
include Owl_base_dense_ndarray_intf.NN with type arr := arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
include module type of struct include Owl_base_dense_ndarray.Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (-$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (*$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (/$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($+) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($-) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($*) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($/) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (!=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<>) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (>) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (>=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
include sig ... end
val (.%{}) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int -> 'a
val (.%{;..}) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int array -> 'a -> unit
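A hedged sketch of the operators above in use; S abbreviates Owl_base_algodiff_primal_ops.S, and the indexing syntax relies on the (.%{;..}) operators being in scope:

module S = Owl_base_algodiff_primal_ops.S

let a = S.ones [| 2; 2 |]
let b = S.(a +$ 3.)              (* add a scalar to every element *)
let x = S.(b.%{0; 1})            (* read element (0, 1) via (.%{;..}) *)
let () = S.(b.%{0; 1} <- 0.)     (* write it back via (.%{;..}<-) *)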
module Scalar = Owl_base_maths
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_base_complex/.dummy b/owl-base/Owl_base_complex/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_base_dense_common/.dummy b/owl-base/Owl_base_dense_common/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_base_dense_matrix_c/.dummy b/owl-base/Owl_base_dense_matrix_c/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_base_dense_matrix_d/.dummy b/owl-base/Owl_base_dense_matrix_d/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_base_dense_matrix_generic/.dummy b/owl-base/Owl_base_dense_matrix_generic/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_base_dense_matrix_intf/.dummy b/owl-base/Owl_base_dense_matrix_intf/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_base_dense_matrix_intf/module-type-Common/index.html b/owl-base/Owl_base_dense_matrix_intf/module-type-Common/index.html deleted file mode 100644 index 5e5bcfe63..000000000 --- a/owl-base/Owl_base_dense_matrix_intf/module-type-Common/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Common (owl-base.Owl_base_dense_matrix_intf.Common)

Module type Owl_base_dense_matrix_intf.Common

type elt
type arr
val diagm : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_base_dense_matrix_s/.dummy b/owl-base/Owl_base_dense_matrix_s/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_base_dense_matrix_z/.dummy b/owl-base/Owl_base_dense_matrix_z/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_base_dense_ndarray/.dummy b/owl-base/Owl_base_dense_ndarray/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_base_dense_ndarray/C/index.html b/owl-base/Owl_base_dense_ndarray/C/index.html deleted file mode 100644 index aafcc9f85..000000000 --- a/owl-base/Owl_base_dense_ndarray/C/index.html +++ /dev/null @@ -1,73 +0,0 @@ - -C (owl-base.Owl_base_dense_ndarray.C)

Module Owl_base_dense_ndarray.C

include module type of struct include Owl_base_dense_ndarray_c end
type elt = Stdlib.Complex.t
type arr = (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
include Owl_base_dense_ndarray_intf.Common with type arr := arr and type elt := elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:float -> int array -> arr
val shape : arr -> int array
val numel : arr -> int
val strides : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val slice_size : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val flatten : arr -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val squeeze : ?axis:int array -> arr -> arr
val expand : ?hi:bool -> arr -> int -> arr
val split : ?axis:int -> int array -> arr -> arr array
val draw : ?axis:int -> arr -> int -> arr * int array
val pad : ?v:elt -> int list list -> arr -> arr
val one_hot : int -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
Iterate array elements
val iteri : (int -> elt -> unit) -> arr -> unit
val iter : (elt -> unit) -> arr -> unit
val mapi : (int -> elt -> elt) -> arr -> arr
val map : (elt -> elt) -> arr -> arr
val filteri : (int -> elt -> bool) -> arr -> int array
val filter : (elt -> bool) -> arr -> int array
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
Examination & Comparison
val exists : (elt -> bool) -> arr -> bool
val not_exists : (elt -> bool) -> arr -> bool
val for_all : (elt -> bool) -> arr -> bool
val is_zero : arr -> bool
val is_positive : arr -> bool
val is_negative : arr -> bool
val is_nonpositive : arr -> bool
val is_nonnegative : arr -> bool
val is_normal : arr -> bool
val not_nan : arr -> bool
val not_inf : arr -> bool
val equal : arr -> arr -> bool
val not_equal : arr -> arr -> bool
val greater : arr -> arr -> bool
val less : arr -> arr -> bool
val greater_equal : arr -> arr -> bool
val less_equal : arr -> arr -> bool
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val equal_scalar : arr -> elt -> bool
val not_equal_scalar : arr -> elt -> bool
val less_scalar : arr -> elt -> bool
val greater_scalar : arr -> elt -> bool
val less_equal_scalar : arr -> elt -> bool
val greater_equal_scalar : arr -> elt -> bool
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val diag : ?k:int -> arr -> arr
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
include module type of struct include Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (-$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (*$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (/$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($+) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($-) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($*) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($/) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (!=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<>) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (>) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (>=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
include sig ... end
val (.%{}) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int -> 'a
val (.%{;..}) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int array -> 'a -> unit
\ No newline at end of file diff --git a/owl-base/Owl_base_dense_ndarray/D/index.html b/owl-base/Owl_base_dense_ndarray/D/index.html deleted file mode 100644 index f56f46071..000000000 --- a/owl-base/Owl_base_dense_ndarray/D/index.html +++ /dev/null @@ -1,225 +0,0 @@ - -D (owl-base.Owl_base_dense_ndarray.D)

Module Owl_base_dense_ndarray.D

include module type of struct include Owl_base_dense_ndarray_d end
type elt = float
type arr = (float, Stdlib.Bigarray.float64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
include Owl_base_dense_ndarray_intf.Common with type arr := arr and type elt := elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:float -> int array -> arr
val shape : arr -> int array
val numel : arr -> int
val strides : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val slice_size : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val flatten : arr -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val squeeze : ?axis:int array -> arr -> arr
val expand : ?hi:bool -> arr -> int -> arr
val split : ?axis:int -> int array -> arr -> arr array
val draw : ?axis:int -> arr -> int -> arr * int array
val pad : ?v:elt -> int list list -> arr -> arr
val one_hot : int -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
Iterate array elements
val iteri : (int -> elt -> unit) -> arr -> unit
val iter : (elt -> unit) -> arr -> unit
val mapi : (int -> elt -> elt) -> arr -> arr
val map : (elt -> elt) -> arr -> arr
val filteri : (int -> elt -> bool) -> arr -> int array
val filter : (elt -> bool) -> arr -> int array
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
Examination & Comparison
val exists : (elt -> bool) -> arr -> bool
val not_exists : (elt -> bool) -> arr -> bool
val for_all : (elt -> bool) -> arr -> bool
val is_zero : arr -> bool
val is_positive : arr -> bool
val is_negative : arr -> bool
val is_nonpositive : arr -> bool
val is_nonnegative : arr -> bool
val is_normal : arr -> bool
val not_nan : arr -> bool
val not_inf : arr -> bool
val equal : arr -> arr -> bool
val not_equal : arr -> arr -> bool
val greater : arr -> arr -> bool
val less : arr -> arr -> bool
val greater_equal : arr -> arr -> bool
val less_equal : arr -> arr -> bool
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val equal_scalar : arr -> elt -> bool
val not_equal_scalar : arr -> elt -> bool
val less_scalar : arr -> elt -> bool
val greater_scalar : arr -> elt -> bool
val less_equal_scalar : arr -> elt -> bool
val greater_equal_scalar : arr -> elt -> bool
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val diag : ?k:int -> arr -> arr
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
include Owl_base_dense_ndarray_intf.Real with type arr := arr and type elt := elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_slices : ?axis:int -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val approx_equal : ?eps:float -> arr -> arr -> bool
val approx_equal_scalar : ?eps:float -> arr -> float -> bool
val approx_elt_equal : ?eps:float -> arr -> arr -> arr
val approx_elt_equal_scalar : ?eps:float -> arr -> float -> arr
val dot : arr -> arr -> arr
val trace : arr -> elt
Helper functions
val float_to_elt : float -> elt
val elt_to_float : elt -> float
include Owl_base_dense_ndarray_intf.NN with type arr := arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
include module type of struct include Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (-$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (*$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (/$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($+) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($-) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($*) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($/) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (!=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<>) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (>) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (>=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
include sig ... end
val (.%{}) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int -> 'a
val (.%{;..}) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int array -> 'a -> unit
\ No newline at end of file diff --git a/owl-base/Owl_base_dense_ndarray/Generic/index.html b/owl-base/Owl_base_dense_ndarray/Generic/index.html deleted file mode 100644 index 62f2ce234..000000000 --- a/owl-base/Owl_base_dense_ndarray/Generic/index.html +++ /dev/null @@ -1,309 +0,0 @@ - -Generic (owl-base.Owl_base_dense_ndarray.Generic)

Module Owl_base_dense_ndarray.Generic

include module type of struct include Owl_base_dense_ndarray_generic end

For the comparison of two complex numbers x and y, Owl uses the following conventions: 1) x and y are equal iff both their real and imaginary parts are equal; 2) x is less than y if the magnitude of x is less than the magnitude of y; if x and y have the same magnitude, x is less than y if the phase of x is less than the phase of y; 3) the less-or-equal, greater, and greater-or-equal relations are defined on top of these conventions.
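
As an illustration of this convention, here is a minimal OCaml sketch (the helper complex_lt is hypothetical and written only to mirror the rules above using Stdlib.Complex):

  let complex_lt x y =
    let open Stdlib.Complex in
    (* compare magnitudes first, break ties on the phase *)
    if norm x <> norm y then norm x < norm y else arg x < arg y

For example, complex_lt { Stdlib.Complex.re = 0.; im = 1. } { Stdlib.Complex.re = -2.; im = 0. } is true, because the first number has the smaller magnitude.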

Type definition
type ('a, 'b) t = ('a, 'b, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t

Refer to :doc:`owl_dense_ndarray_generic`

type ('a, 'b) kind = ('a, 'b) Stdlib.Bigarray.kind

Refer to :doc:`owl_dense_ndarray_generic`
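
A brief, hedged usage sketch of the kind-parameterised interface (the bindings below are illustrative, not part of the module):

  module G = Owl_base_dense_ndarray.Generic

  let () =
    (* a 2 x 3 double-precision ndarray; set and then read one element *)
    let x = G.zeros Bigarray.Float64 [| 2; 3 |] in
    G.set x [| 0; 1 |] 3.14;
    Printf.printf "x.(0,1) = %g\n" (G.get x [| 0; 1 |])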

Create Ndarrays
val empty : ('a, 'b) kind -> int array -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val create : ('a, 'b) kind -> int array -> 'a -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val init : ('a, 'b) kind -> int array -> (int -> 'a) -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val init_nd : ('a, 'b) kind -> int array -> (int array -> 'a) -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val zeros : ('a, 'b) kind -> int array -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val ones : ('a, 'b) kind -> int array -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val eye : ('a, 'b) kind -> int -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val uniform : ('a, 'b) kind -> ?a:'a -> ?b:'a -> int array -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val gaussian : ('a, 'b) kind -> ?mu:'a -> ?sigma:'a -> int array -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val sequential : ('a, 'b) kind -> ?a:'a -> ?step:'a -> int array -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val bernoulli : ('a, 'b) kind -> ?p:float -> int array -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

Obtain basic properties
val shape : ('a, 'b) t -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val num_dims : ('a, 'b) t -> int

Refer to :doc:`owl_dense_ndarray_generic`

val nth_dim : ('a, 'b) t -> int -> int

Refer to :doc:`owl_dense_ndarray_generic`

val numel : ('a, 'b) t -> int

Refer to :doc:`owl_dense_ndarray_generic`

val kind : ('a, 'b) t -> ('a, 'b) kind

Refer to :doc:`owl_dense_ndarray_generic`

val strides : ('a, 'b) t -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val slice_size : ('a, 'b) t -> int array

Refer to :doc:`owl_dense_ndarray_generic`

Manipulate Ndarrays
val get : ('a, 'b) t -> int array -> 'a

Refer to :doc:`owl_dense_ndarray_generic`

val set : ('a, 'b) t -> int array -> 'a -> unit

Refer to :doc:`owl_dense_ndarray_generic`

val get_slice : int list list -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val set_slice : int list list -> ('a, 'b) t -> ('a, 'b) t -> unit

Refer to :doc:`owl_dense_ndarray_generic`
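
The int list list argument gives, per axis, a [start; stop] or [start; stop; step] range, following Owl's dense slicing style (an empty list is commonly used to select the whole axis). A hedged sketch, reusing the G alias from the sketch above:

  let () =
    let x = G.sequential Bigarray.Float64 [| 3; 4 |] in
    (* rows 0..1 and every second column form a 2 x 2 slice *)
    let y = G.get_slice [ [ 0; 1 ]; [ 0; 3; 2 ] ] x in
    G.set_slice [ [ 0; 1 ]; [ 0; 3; 2 ] ] x y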

val get_fancy : Owl_types.index list -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val set_fancy : Owl_types.index list -> ('a, 'b) t -> ('a, 'b) t -> unit

Refer to :doc:`owl_dense_ndarray_generic`

val reset : ('a, 'b) t -> unit

Refer to :doc:`owl_dense_ndarray_generic`

val fill : ('a, 'b) t -> 'a -> unit

Refer to :doc:`owl_dense_ndarray_generic`

val copy : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val copy_ : out:('a, 'b) t -> ('a, 'b) t -> unit

Refer to :doc:`owl_dense_ndarray_generic`

val reshape : ('a, 'b) t -> int array -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val flatten : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val reverse : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose : ?axis:int array -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val tile : ('a, 'b) t -> int array -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val repeat : ('a, 'b) t -> int array -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val pad : ?v:'a -> int list list -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val concatenate : ?axis:int -> ('a, 'b) t array -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val stack : ?axis:int -> ('a, 'b) t array -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val squeeze : ?axis:int array -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val expand : ?hi:bool -> ('a, 'b) t -> int -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val split : ?axis:int -> int array -> ('a, 'b) t -> ('a, 'b) t array

Refer to :doc:`owl_dense_ndarray_generic`

val draw : ?axis:int -> ('a, 'b) t -> int -> ('a, 'b) t * int array

Refer to :doc:`owl_dense_ndarray_generic`

val one_hot : int -> ('a, 'b) t -> ('a, 'b) t

TODO: not implemented

Iterate array elements
val iteri : (int -> 'a -> unit) -> ('a, 'b) t -> unit

Refer to :doc:`owl_dense_ndarray_generic`

val iter : ('a -> unit) -> ('a, 'b) t -> unit

Refer to :doc:`owl_dense_ndarray_generic`

val mapi : (int -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val map : ('a -> 'a) -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val filteri : (int -> 'a -> bool) -> ('a, 'b) t -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val filter : ('a -> bool) -> ('a, 'b) t -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val foldi : ?axis:int -> (int -> 'a -> 'a -> 'a) -> 'a -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val fold : ?axis:int -> ('a -> 'a -> 'a) -> 'a -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val scani : ?axis:int -> (int -> 'a -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val scan : ?axis:int -> ('a -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

Examination & Comparison
val exists : ('a -> bool) -> ('a, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val not_exists : ('a -> bool) -> ('a, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val for_all : ('a -> bool) -> ('a, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val is_zero : ('a, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val is_positive : ('a, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val is_negative : ('a, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val is_nonpositive : ('a, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val is_nonnegative : ('a, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val is_normal : ('a, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val not_nan : ('a, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val not_inf : ('a, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val equal : ('a, 'b) t -> ('a, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val not_equal : ('a, 'b) t -> ('a, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val greater : ('a, 'b) t -> ('a, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val less : ('a, 'b) t -> ('a, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val greater_equal : ('a, 'b) t -> ('a, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val less_equal : ('a, 'b) t -> ('a, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val elt_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val elt_not_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val elt_less : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val elt_greater : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val elt_less_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val elt_greater_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val equal_scalar : ('a, 'b) t -> 'a -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val not_equal_scalar : ('a, 'b) t -> 'a -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val less_scalar : ('a, 'b) t -> 'a -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val greater_scalar : ('a, 'b) t -> 'a -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val less_equal_scalar : ('a, 'b) t -> 'a -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val greater_equal_scalar : ('a, 'b) t -> 'a -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val elt_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val elt_not_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val elt_less_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val elt_greater_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val elt_less_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val elt_greater_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val approx_equal : ?eps:float -> (float, 'b) t -> (float, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val approx_equal_scalar : ?eps:float -> (float, 'b) t -> float -> bool

Refer to :doc:`owl_dense_ndarray_generic`

val approx_elt_equal : ?eps:float -> (float, 'b) t -> (float, 'b) t -> (float, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val approx_elt_equal_scalar : ?eps:float -> (float, 'b) t -> float -> (float, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

Input/Output functions
val of_array : ('a, 'b) kind -> 'a array -> int array -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:('a -> string) -> ('a, 'b) t -> unit

Refer to :doc:`owl_dense_ndarray_generic`

val load : ('a, 'b) kind -> string -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

Unary math operators
val min : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val max : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val sum : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val sum' : ('a, 'b) t -> 'a

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp' : ('a, 'b) t -> 'a

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val sum_reduce : ?axis:int array -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val min' : ('a, 'b) t -> 'a

Refer to :doc:`owl_dense_ndarray_generic`

val max' : ('a, 'b) t -> 'a

Refer to :doc:`owl_dense_ndarray_generic`

val abs : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val conj : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val neg : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val reci : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val signum : (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val sqr : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val sqrt : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val cbrt : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val exp : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val exp2 : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val exp10 : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val expm1 : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val log : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val log2 : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val log10 : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val log1p : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val sin : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val cos : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val tan : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val asin : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val acos : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val atan : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val sinh : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val cosh : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val tanh : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val asinh : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val acosh : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val atanh : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val floor : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val ceil : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val round : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val trunc : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val fix : ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val erf : (float, 'b) t -> (float, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val erfc : (float, 'b) t -> (float, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val relu : (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val dawsn : (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val softsign : (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val softplus : (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val sigmoid : (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val softmax : ?axis:int -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val l1norm' : (float, 'b) t -> float

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm' : (float, 'b) t -> float

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm_sqr' : (float, 'b) t -> float

Refer to :doc:`owl_dense_ndarray_generic`

Binary math operators
val add : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val sub : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val mul : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val div : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val add_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val sub_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val mul_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val div_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val scalar_add : 'a -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val scalar_sub : 'a -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val scalar_mul : 'a -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val scalar_div : 'a -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val pow : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val scalar_pow : 'a -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val pow_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val atan2 : (float, 'a) t -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val scalar_atan2 : float -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val atan2_scalar : (float, 'a) t -> float -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val min2 : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val max2 : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val fmod : (float, 'a) t -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val fmod_scalar : (float, 'a) t -> float -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val scalar_fmod : float -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val clip_by_value : ?amin:float -> ?amax:float -> (float, 'b) t -> (float, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val clip_by_l2norm : float -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val fma : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

val conv1d : ?padding:Owl_types.padding -> (float, 'a) t -> (float, 'a) t -> int array -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val conv2d : ?padding:Owl_types.padding -> (float, 'a) t -> (float, 'a) t -> int array -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val conv3d : ?padding:Owl_types.padding -> (float, 'a) t -> (float, 'a) t -> int array -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv1d : ?padding:Owl_types.padding -> (float, 'a) t -> (float, 'a) t -> int array -> int array -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv2d : ?padding:Owl_types.padding -> (float, 'a) t -> (float, 'a) t -> int array -> int array -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv3d : ?padding:Owl_types.padding -> (float, 'a) t -> (float, 'a) t -> int array -> int array -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv1d : ?padding:Owl_types.padding -> (float, 'a) t -> (float, 'a) t -> int array -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv2d : ?padding:Owl_types.padding -> (float, 'a) t -> (float, 'a) t -> int array -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv3d : ?padding:Owl_types.padding -> (float, 'a) t -> (float, 'a) t -> int array -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool1d : ?padding:Owl_types.padding -> (float, 'a) t -> int array -> int array -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool2d : ?padding:Owl_types.padding -> (float, 'a) t -> int array -> int array -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool3d : ?padding:Owl_types.padding -> (float, 'a) t -> int array -> int array -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool1d : ?padding:Owl_types.padding -> (float, 'a) t -> int array -> int array -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool2d : ?padding:Owl_types.padding -> (float, 'a) t -> int array -> int array -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool3d : ?padding:Owl_types.padding -> (float, 'a) t -> int array -> int array -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val upsampling2d : (float, 'a) t -> int array -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val conv1d_backward_input : (float, 'a) t -> (float, 'a) t -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val conv1d_backward_kernel : (float, 'a) t -> (float, 'a) t -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val conv2d_backward_input : (float, 'a) t -> (float, 'a) t -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val conv2d_backward_kernel : (float, 'a) t -> (float, 'a) t -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val conv3d_backward_input : (float, 'a) t -> (float, 'a) t -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val conv3d_backward_kernel : (float, 'a) t -> (float, 'a) t -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv1d_backward_input : (float, 'a) t -> (float, 'a) t -> int array -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv1d_backward_kernel : (float, 'a) t -> (float, 'a) t -> int array -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv2d_backward_input : (float, 'a) t -> (float, 'a) t -> int array -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv2d_backward_kernel : (float, 'a) t -> (float, 'a) t -> int array -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv3d_backward_input : (float, 'a) t -> (float, 'a) t -> int array -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv3d_backward_kernel : (float, 'a) t -> (float, 'a) t -> int array -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv1d_backward_input : (float, 'a) t -> (float, 'a) t -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv1d_backward_kernel : (float, 'a) t -> (float, 'a) t -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv2d_backward_input : (float, 'a) t -> (float, 'a) t -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv2d_backward_kernel : (float, 'a) t -> (float, 'a) t -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv3d_backward_input : (float, 'a) t -> (float, 'a) t -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv3d_backward_kernel : (float, 'a) t -> (float, 'a) t -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool1d_backward : Owl_types.padding -> (float, 'a) t -> int array -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool2d_backward : Owl_types.padding -> (float, 'a) t -> int array -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool3d_backward : Owl_types.padding -> (float, 'a) t -> int array -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool1d_backward : Owl_types.padding -> (float, 'a) t -> int array -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool2d_backward : Owl_types.padding -> (float, 'a) t -> int array -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool3d_backward : Owl_types.padding -> (float, 'a) t -> int array -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

val upsampling2d_backward : (float, 'a) t -> int array -> (float, 'a) t -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`

Helper functions
val sum_slices : ?axis:int -> (float, 'b) t -> (float, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`

Matrix functions
val row_num : ('a, 'b) t -> int

Refer to :doc:`owl_dense_matrix_generic`

val col_num : ('a, 'b) t -> int

Refer to :doc:`owl_dense_matrix_generic`

val row : ('a, 'b) t -> int -> ('a, 'b) t

Refer to :doc:`owl_dense_matrix_generic`

val rows : ('a, 'b) t -> int array -> ('a, 'b) t

Refer to :doc:`owl_dense_matrix_generic`

val copy_row_to : ('a, 'b) t -> ('a, 'b) t -> int -> unit

Refer to :doc:`owl_dense_matrix_generic`

val copy_col_to : ('a, 'b) t -> ('a, 'b) t -> int -> unit

Refer to :doc:`owl_dense_matrix_generic`

val dot : (float, 'b) t -> (float, 'b) t -> (float, 'b) t

Refer to :doc:`owl_dense_matrix_generic`

val diag : ?k:int -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_matrix_generic`

val trace : (float, 'b) t -> float

Refer to :doc:`owl_dense_matrix_generic`

val to_rows : ('a, 'b) t -> ('a, 'b) t array

Refer to :doc:`owl_dense_matrix_generic`

val of_rows : ('a, 'b) t array -> ('a, 'b) t

Refer to :doc:`owl_dense_matrix_generic`

val to_cols : ('a, 'b) t -> ('a, 'b) t array

TODO

val of_cols : ('a, 'b) t array -> ('a, 'b) t

TODO

val of_arrays : ('a, 'b) kind -> 'a array array -> ('a, 'b) t

Refer to :doc:`owl_dense_matrix_generic`

val draw_rows : ?replacement:bool -> ('a, 'b) t -> int -> ('a, 'b) t * int array

Refer to :doc:`owl_dense_matrix_generic`

val draw_rows2 : ?replacement:bool -> ('a, 'b) t -> ('a, 'b) t -> int -> ('a, 'b) t * ('a, 'b) t * int array

Refer to :doc:`owl_dense_matrix_generic`

Helper functions
val float_to_elt : 'a -> 'a

Identity function to deal with the type conversion required by other functors.

val elt_to_float : 'a -> 'a

Identity function to deal with the type conversion required by other functors.
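
In this module the conversion is a no-op; a tiny, hedged illustration (the values are made up):

  let pi_elt = float_to_elt 3.14159   (* identity here, so pi_elt = 3.14159 *)
  let pi     = elt_to_float pi_elt    (* round-trips back to the same float *)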

include module type of struct include Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (-$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (*$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (/$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($+) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($-) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($*) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($/) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (!=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<>) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (>) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (>=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
include sig ... end
val (.%{}) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int -> 'a
val (.%{;..}) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int array -> 'a -> unit
\ No newline at end of file diff --git a/owl-base/Owl_base_dense_ndarray/Operator/index.html b/owl-base/Owl_base_dense_ndarray/Operator/index.html deleted file mode 100644 index 190764b93..000000000 --- a/owl-base/Owl_base_dense_ndarray/Operator/index.html +++ /dev/null @@ -1,63 +0,0 @@ - -Operator (owl-base.Owl_base_dense_ndarray.Operator)

Module Owl_base_dense_ndarray.Operator

include sig ... end
val (+$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (-$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (*$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (/$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($+) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($-) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($*) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($/) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (!=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<>) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (>) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (>=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
include sig ... end
val (.%{}) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int -> 'a
val (.%{;..}) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int array -> 'a -> unit
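
A short, hedged usage sketch of these operators (module paths as shown on this page; the concrete shapes and values are made up):

  let () =
    let open Owl_base_dense_ndarray.Operator in
    let x = Owl_base_dense_ndarray.Generic.zeros Bigarray.Float64 [| 3; 3 |] in
    let y = x +$ 2.0 in                 (* add a scalar to every element *)
    y.%{1; 2} <- y.%{1; 2} *. 10.;      (* extended indexing: read and write one cell *)
    assert (y >= x)                     (* elementwise comparison reduced to a bool *)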
\ No newline at end of file diff --git a/owl-base/Owl_base_dense_ndarray/S/index.html b/owl-base/Owl_base_dense_ndarray/S/index.html deleted file mode 100644 index ae7835afa..000000000 --- a/owl-base/Owl_base_dense_ndarray/S/index.html +++ /dev/null @@ -1,225 +0,0 @@ - -S (owl-base.Owl_base_dense_ndarray.S)

Module Owl_base_dense_ndarray.S

include module type of struct include Owl_base_dense_ndarray_s end
type elt = float
type arr = (float, Stdlib.Bigarray.float32_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
include Owl_base_dense_ndarray_intf.Common with type arr := arr and type elt := elt
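
S instantiates the generic interface at single precision (elt = float, arr is a float32 Genarray). A brief, hedged sketch using only functions listed below:

  module S = Owl_base_dense_ndarray.S

  let () =
    let x = S.uniform [| 2; 2 |] in            (* float32 ndarray with uniformly drawn entries *)
    Printf.printf "sum = %g\n" (S.sum' x)
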
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:float -> int array -> arr
val shape : arr -> int array
val numel : arr -> int
val strides : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val slice_size : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val flatten : arr -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val squeeze : ?axis:int array -> arr -> arr
val expand : ?hi:bool -> arr -> int -> arr
val split : ?axis:int -> int array -> arr -> arr array
val draw : ?axis:int -> arr -> int -> arr * int array
val pad : ?v:elt -> int list list -> arr -> arr
val one_hot : int -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
Iterate array elements
val iteri : (int -> elt -> unit) -> arr -> unit
val iter : (elt -> unit) -> arr -> unit
val mapi : (int -> elt -> elt) -> arr -> arr
val map : (elt -> elt) -> arr -> arr
val filteri : (int -> elt -> bool) -> arr -> int array
val filter : (elt -> bool) -> arr -> int array
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
Examination & Comparison
val exists : (elt -> bool) -> arr -> bool
val not_exists : (elt -> bool) -> arr -> bool
val for_all : (elt -> bool) -> arr -> bool
val is_zero : arr -> bool
val is_positive : arr -> bool
val is_negative : arr -> bool
val is_nonpositive : arr -> bool
val is_nonnegative : arr -> bool
val is_normal : arr -> bool
val not_nan : arr -> bool
val not_inf : arr -> bool
val equal : arr -> arr -> bool
val not_equal : arr -> arr -> bool
val greater : arr -> arr -> bool
val less : arr -> arr -> bool
val greater_equal : arr -> arr -> bool
val less_equal : arr -> arr -> bool
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val equal_scalar : arr -> elt -> bool
val not_equal_scalar : arr -> elt -> bool
val less_scalar : arr -> elt -> bool
val greater_scalar : arr -> elt -> bool
val less_equal_scalar : arr -> elt -> bool
val greater_equal_scalar : arr -> elt -> bool
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val diag : ?k:int -> arr -> arr
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
include Owl_base_dense_ndarray_intf.Real with type arr := arr and type elt := elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_slices : ?axis:int -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val approx_equal : ?eps:float -> arr -> arr -> bool
val approx_equal_scalar : ?eps:float -> arr -> float -> bool
val approx_elt_equal : ?eps:float -> arr -> arr -> arr
val approx_elt_equal_scalar : ?eps:float -> arr -> float -> arr
val dot : arr -> arr -> arr
val trace : arr -> elt
Helper functions
val float_to_elt : float -> elt
val elt_to_float : elt -> float
include Owl_base_dense_ndarray_intf.NN with type arr := arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
include module type of struct include Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (-$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (*$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (/$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($+) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($-) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($*) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($/) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (!=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<>) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (>) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (>=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
include sig ... end
val (.%{}) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int -> 'a
val (.%{;..}) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int array -> 'a -> unit
\ No newline at end of file diff --git a/owl-base/Owl_base_dense_ndarray/Z/index.html b/owl-base/Owl_base_dense_ndarray/Z/index.html deleted file mode 100644 index 4b47bfb0c..000000000 --- a/owl-base/Owl_base_dense_ndarray/Z/index.html +++ /dev/null @@ -1,73 +0,0 @@ - -Z (owl-base.Owl_base_dense_ndarray.Z)

Module Owl_base_dense_ndarray.Z

include module type of struct include Owl_base_dense_ndarray_z end
type elt = Stdlib.Complex.t
type arr = (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
include Owl_base_dense_ndarray_intf.Common with type arr := arr and type elt := elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:float -> int array -> arr
val shape : arr -> int array
val numel : arr -> int
val strides : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val slice_size : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val flatten : arr -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val squeeze : ?axis:int array -> arr -> arr
val expand : ?hi:bool -> arr -> int -> arr
val split : ?axis:int -> int array -> arr -> arr array
val draw : ?axis:int -> arr -> int -> arr * int array
val pad : ?v:elt -> int list list -> arr -> arr
val one_hot : int -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
Iterate array elements
val iteri : (int -> elt -> unit) -> arr -> unit
val iter : (elt -> unit) -> arr -> unit
val mapi : (int -> elt -> elt) -> arr -> arr
val map : (elt -> elt) -> arr -> arr
val filteri : (int -> elt -> bool) -> arr -> int array
val filter : (elt -> bool) -> arr -> int array
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
Examination & Comparison
val exists : (elt -> bool) -> arr -> bool
val not_exists : (elt -> bool) -> arr -> bool
val for_all : (elt -> bool) -> arr -> bool
val is_zero : arr -> bool
val is_positive : arr -> bool
val is_negative : arr -> bool
val is_nonpositive : arr -> bool
val is_nonnegative : arr -> bool
val is_normal : arr -> bool
val not_nan : arr -> bool
val not_inf : arr -> bool
val equal : arr -> arr -> bool
val not_equal : arr -> arr -> bool
val greater : arr -> arr -> bool
val less : arr -> arr -> bool
val greater_equal : arr -> arr -> bool
val less_equal : arr -> arr -> bool
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val equal_scalar : arr -> elt -> bool
val not_equal_scalar : arr -> elt -> bool
val less_scalar : arr -> elt -> bool
val greater_scalar : arr -> elt -> bool
val less_equal_scalar : arr -> elt -> bool
val greater_equal_scalar : arr -> elt -> bool
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val diag : ?k:int -> arr -> arr
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
include module type of struct include Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (-$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (*$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (/$) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($+) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($-) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($*) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val ($/) : 'a -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t
val (=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (!=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<>) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (>) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (>=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
val (<=) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> ('a, 'b) Owl_base_dense_ndarray_generic.t -> bool
include sig ... end
val (.%{}) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int -> 'a
val (.%{;..}) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_base_dense_ndarray_generic.t -> int array -> 'a -> unit
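A hedged sketch of the Operator include in use; it assumes a module satisfying this signature has been opened so that the scalar operators and the extended index operators above are in scope (the function name is illustrative only):

let operator_demo x =
  let y = (x +$ 1.) *$ 2. in   (* scalar add, then scalar multiply, elementwise *)
  let v = y.%{0; 0} in         (* read the element at index (0, 0)              *)
  y.%{0; 0} <- v +. 10.;       (* write it back, shifted by 10                  *)
  y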
deleted: owl-base/Owl_base_dense_ndarray_c/.dummy
deleted: owl-base/Owl_base_dense_ndarray_d/.dummy
deleted: owl-base/Owl_base_dense_ndarray_generic/.dummy
deleted: owl-base/Owl_base_dense_ndarray_intf/.dummy
deleted: owl-base/Owl_base_dense_ndarray_intf/module-type-Common/index.html

Module type Owl_base_dense_ndarray_intf.Common

type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:float -> int array -> arr
val shape : arr -> int array
val numel : arr -> int
val strides : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val slice_size : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`
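A hedged sketch of the creation and inspection functions above, again as a functor over an illustrative float-element slice of the Common signature:

module Create_demo (M : sig
  type arr
  type elt = float
  val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
  val init_nd  : int array -> (int array -> elt) -> arr
  val shape    : arr -> int array
  val numel    : arr -> int
  val strides  : arr -> int array
end) = struct
  let run () =
    let x = M.gaussian ~mu:0. ~sigma:1. [| 3; 4 |] in                       (* 3x4 normal samples *)
    let y = M.init_nd [| 3; 4 |] (fun i -> float_of_int (i.(0) + i.(1))) in (* value from index   *)
    M.shape x, M.numel y, M.strides y
end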

val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val flatten : arr -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val squeeze : ?axis:int array -> arr -> arr
val expand : ?hi:bool -> arr -> int -> arr
val split : ?axis:int -> int array -> arr -> arr array
val draw : ?axis:int -> arr -> int -> arr * int array
val pad : ?v:elt -> int list list -> arr -> arr
val one_hot : int -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
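The arithmetic and reduction functions above compose in the obvious way; a hedged functor sketch over an illustrative float-element slice of the signature:

module Arith_demo (M : sig
  type arr
  type elt = float
  val add        : arr -> arr -> arr
  val mul_scalar : arr -> elt -> arr
  val fma        : arr -> arr -> arr -> arr
  val sum        : ?axis:int -> ?keep_dims:bool -> arr -> arr
  val max'       : arr -> elt
end) = struct
  let run a b =
    let c = M.add (M.mul_scalar a 2.) b in  (* 2*a + b, elementwise              *)
    let d = M.fma a b c in                  (* fused multiply-add: a*b + c       *)
    M.max' (M.sum ~axis:0 d)                (* reduce along axis 0, take the max *)
end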
Iterate array elements
val iteri : (int -> elt -> unit) -> arr -> unit
val iter : (elt -> unit) -> arr -> unit
val mapi : (int -> elt -> elt) -> arr -> arr
val map : (elt -> elt) -> arr -> arr
val filteri : (int -> elt -> bool) -> arr -> int array
val filter : (elt -> bool) -> arr -> int array
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
Examination & Comparison
val exists : (elt -> bool) -> arr -> bool
val not_exists : (elt -> bool) -> arr -> bool
val for_all : (elt -> bool) -> arr -> bool
val is_zero : arr -> bool
val is_positive : arr -> bool
val is_negative : arr -> bool
val is_nonpositive : arr -> bool
val is_nonnegative : arr -> bool
val is_normal : arr -> bool
val not_nan : arr -> bool
val not_inf : arr -> bool
val equal : arr -> arr -> bool
val not_equal : arr -> arr -> bool
val greater : arr -> arr -> bool
val less : arr -> arr -> bool
val greater_equal : arr -> arr -> bool
val less_equal : arr -> arr -> bool
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val equal_scalar : arr -> elt -> bool
val not_equal_scalar : arr -> elt -> bool
val less_scalar : arr -> elt -> bool
val greater_scalar : arr -> elt -> bool
val less_equal_scalar : arr -> elt -> bool
val greater_equal_scalar : arr -> elt -> bool
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val diag : ?k:int -> arr -> arr
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
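A hedged sketch of the matrix-flavoured helpers at the end of Common (functor and names illustrative):

module Matrix_demo (M : sig
  type arr
  type elt = float
  val of_arrays : elt array array -> arr
  val row       : arr -> int -> arr
  val transpose : ?axis:int array -> arr -> arr
  val row_num   : arr -> int
  val col_num   : arr -> int
end) = struct
  let run () =
    let m  = M.of_arrays [| [| 1.; 2. |]; [| 3.; 4. |] |] in  (* 2x2 from nested arrays      *)
    let r0 = M.row m 0 in                                     (* first row                   *)
    let mt = M.transpose m in                                 (* default: reverse the axes   *)
    (M.row_num m, M.col_num m), r0, mt
end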
deleted: owl-base/Owl_base_dense_ndarray_intf/module-type-NN/index.html

Module type Owl_base_dense_ndarray_intf.NN

type arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
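A hedged sketch of a tiny forward pass through conv2d and max_pool2d. The batch-first, channel-last input layout and the SAME/VALID constructors of Owl_types_common.padding follow Owl's usual conventions, but treat the exact shapes here as assumptions:

module Conv_demo (M : sig
  type arr
  val gaussian   : ?mu:float -> ?sigma:float -> int array -> arr
  val conv2d     : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
  val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
end) = struct
  let run () =
    let x = M.gaussian [| 1; 28; 28; 1 |] in   (* one 28x28 single-channel input *)
    let k = M.gaussian [| 3; 3; 1; 8 |] in     (* 3x3 kernel, 1 -> 8 channels    *)
    let y = M.conv2d ~padding:Owl_types_common.SAME x k [| 1; 1 |] in  (* stride 1x1 *)
    M.max_pool2d ~padding:Owl_types_common.VALID y [| 2; 2 |] [| 2; 2 |]  (* 2x2 pool, stride 2 *)
end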
deleted: owl-base/Owl_base_dense_ndarray_intf/module-type-Real/index.html

Module type Owl_base_dense_ndarray_intf.Real

type elt
type arr
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_slices : ?axis:int -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val approx_equal : ?eps:float -> arr -> arr -> bool
val approx_equal_scalar : ?eps:float -> arr -> float -> bool
val approx_elt_equal : ?eps:float -> arr -> arr -> arr
val approx_elt_equal_scalar : ?eps:float -> arr -> float -> arr
val dot : arr -> arr -> arr
val trace : arr -> elt
Helper functions
val float_to_elt : float -> elt
val elt_to_float : elt -> float
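A hedged sketch of the real-valued extras that Real layers on top of Common (functor and names illustrative):

module Real_demo (M : sig
  type arr
  type elt = float
  val dot           : arr -> arr -> arr
  val sigmoid       : arr -> arr
  val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
  val l2norm'       : arr -> elt
  val approx_equal  : ?eps:float -> arr -> arr -> bool
end) = struct
  (* a dense-layer style activation followed by clipping and a norm check *)
  let run x w =
    let a = M.sigmoid (M.dot x w) in
    let c = M.clip_by_value ~amin:0.1 ~amax:0.9 a in
    M.l2norm' c, M.approx_equal ~eps:1e-6 a c
end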
deleted: owl-base/Owl_base_dense_ndarray_s/.dummy
deleted: owl-base/Owl_base_dense_ndarray_z/.dummy
deleted: owl-base/Owl_base_linalg_c/.dummy
deleted: owl-base/Owl_base_linalg_d/.dummy
deleted: owl-base/Owl_base_linalg_generic/.dummy
deleted: owl-base/Owl_base_linalg_intf/.dummy
deleted: owl-base/Owl_base_linalg_intf/module-type-Common/index.html

Module type Owl_base_linalg_intf.Common

type elt
type mat
type complex_mat
type int32_mat
Basic functions
val inv : mat -> mat
val det : mat -> elt
val logdet : mat -> elt
val is_triu : mat -> bool
val is_tril : mat -> bool
val is_symmetric : mat -> bool
val is_diag : mat -> bool
Factorisation
val svd : ?thin:bool -> mat -> mat * mat * mat
val chol : ?upper:bool -> mat -> mat
val qr : ?thin:bool -> ?pivot:bool -> mat -> mat * mat * int32_mat
val lq : ?thin:bool -> mat -> mat * mat
Linear system of equations
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> mat -> mat -> mat
val sylvester : mat -> mat -> mat -> mat
val lyapunov : mat -> mat -> mat
val discrete_lyapunov : ?solver:[ `default | `direct | `bilinear ] -> mat -> mat -> mat
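A hedged sketch of the factorisation and solver entry points (functor and names illustrative; the int32_mat returned by qr carries the pivot):

module Linalg_demo (L : sig
  type mat
  type int32_mat
  val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> mat -> mat -> mat
  val svd      : ?thin:bool -> mat -> mat * mat * mat
  val qr       : ?thin:bool -> ?pivot:bool -> mat -> mat * mat * int32_mat
end) = struct
  let run a b =
    let x        = L.linsolve a b in      (* solve a * x = b                   *)
    let u, s, vt = L.svd ~thin:true a in  (* thin singular value decomposition *)
    let q, r, _p = L.qr a in              (* QR, pivot ignored in this sketch  *)
    x, (u, s, vt), (q, r)
end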
deleted: owl-base/Owl_base_linalg_intf/module-type-Real/index.html

Module type Owl_base_linalg_intf.Real

type elt
type mat
val care : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
val dare : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
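For the Riccati solvers a one-liner each is enough; a hedged sketch in the same illustrative-functor style:

module Riccati_demo (L : sig
  type mat
  val care : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
  val dare : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
end) = struct
  (* continuous- and discrete-time algebraic Riccati equations, LQR-style inputs a, b, q, r *)
  let run a b q r = L.care a b q r, L.dare a b q r
end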
deleted: owl-base/Owl_base_linalg_s/.dummy
deleted: owl-base/Owl_base_linalg_z/.dummy
deleted: owl-base/Owl_base_maths/.dummy
deleted: owl-base/Owl_base_slicing/.dummy
deleted: owl-base/Owl_base_stats/.dummy
deleted: owl-base/Owl_base_stats_dist_bernoulli/.dummy
deleted: owl-base/Owl_base_stats_dist_cauchy/.dummy
deleted: owl-base/Owl_base_stats_dist_exponential/.dummy
deleted: owl-base/Owl_base_stats_dist_gamma/.dummy
deleted: owl-base/Owl_base_stats_dist_gaussian/.dummy
deleted: owl-base/Owl_base_stats_dist_gumbel1/.dummy
deleted: owl-base/Owl_base_stats_dist_gumbel2/.dummy
deleted: owl-base/Owl_base_stats_dist_uniform/.dummy
deleted: owl-base/Owl_base_stats_prng/.dummy
deleted: owl-base/Owl_computation/.dummy
deleted: owl-base/Owl_computation_cpu_device/.dummy
deleted: owl-base/Owl_computation_cpu_device/Make/argument-1-A/Linalg/index.html

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
deleted: owl-base/Owl_computation_cpu_device/Make/argument-1-A/Mat/index.html

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
deleted: owl-base/Owl_computation_cpu_device/Make/argument-1-A/Scalar/index.html

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
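A hedged sketch of building a scalar expression from the A.Scalar primitives above, written against the Owl_types_ndarray_mutable.Sig parameter signature named in this functor's documentation (the demo functor itself is illustrative):

module Scalar_demo (A : Owl_types_ndarray_mutable.Sig) = struct
  (* log (exp x + exp (-x)), built purely from A.Scalar operations *)
  let log_cosh_ish (x : A.elt) : A.elt =
    A.Scalar.log (A.Scalar.add (A.Scalar.exp x) (A.Scalar.exp (A.Scalar.neg x)))
end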
deleted: owl-base/Owl_computation_cpu_device/Make/argument-1-A/index.html

Parameter Make.A

include Owl_types_ndarray_mutable.Sig
include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
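The underscore-suffixed functions above write into an existing buffer through ~out instead of allocating a new array on every step; a hedged functor sketch over the same parameter signature:

module Inplace_demo (A : Owl_types_ndarray_mutable.Sig) = struct
  (* out <- relu (0.5 * (x + y)), reusing one preallocated buffer throughout *)
  let fused_step x y =
    let out = A.copy x in
    A.add_ ~out x y;                              (* out <- x + y       *)
    A.mul_scalar_ ~out out (A.float_to_elt 0.5);  (* out <- out * 0.5   *)
    A.relu_ ~out out;                             (* out <- relu out    *)
    out
end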
deleted: owl-base/Owl_computation_cpu_device/Make/index.html

Module Owl_computation_cpu_device.Make

Parameters

Signature

module A = A
type device = {
  device_type : Owl_types.device_type;
  initialised : bool;
}
type value =
  | ArrVal of A.arr
  | EltVal of A.elt
val make_device : unit -> device
val arr_to_value : A.arr -> value
val value_to_arr : value -> A.arr
val elt_to_value : A.elt -> value
val value_to_elt : value -> A.elt
val value_to_float : value -> float
val is_arr : value -> bool
val is_elt : value -> bool
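A hedged sketch of how the functor's value wrappers round-trip an ndarray; the surrounding demo functor is illustrative, while Make and its conversion functions are taken from the listing above:

module Device_demo (A : Owl_types_ndarray_mutable.Sig) = struct
  module D = Owl_computation_cpu_device.Make (A)

  let roundtrip (x : A.arr) =
    let v = D.arr_to_value x in   (* box the raw ndarray as a device value *)
    assert (D.is_arr v);
    D.value_to_arr v              (* and unbox it again                    *)
end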
deleted: owl-base/Owl_computation_cpu_engine/.dummy
deleted: owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Linalg/index.html

Module Operator.Linalg

deleted: owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Mat/index.html

Module Operator.Mat

deleted: owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Scalar/index.html

Module Operator.Scalar

deleted: owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
deleted: owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
deleted: owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
deleted: owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html

Module Device.A

val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
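The trailing-underscore functions above are the in-place variants of the corresponding pure operations: the optional ~out buffer receives the result, and when it is omitted the first array argument is typically overwritten. A minimal sketch, assuming Owl_algodiff_primal_ops.S (from the owl package) as a concrete instance of this signature:

  let () =
    let open Owl_algodiff_primal_ops.S in
    let x = uniform [| 3; 3 |] in
    let y = ones [| 3; 3 |] in
    let out = empty [| 3; 3 |] in
    add_ ~out x y;        (* x + y written into the pre-allocated [out] *)
    sigmoid_ x;           (* no [out] given: x is overwritten in place *)
    print out;
    print x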
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html b/owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html deleted file mode 100644 index 87f1d4a35..000000000 --- a/owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Device (owl-base.Owl_computation_cpu_engine.Make.Graph.Optimiser.Operator.Symbol.Shape.Type.Device)

Module Type.Device

module A : sig ... end
val make_device : unit -> device
val arr_to_value : A.arr -> value
val value_to_arr : value -> A.arr
val elt_to_value : A.elt -> value
val value_to_elt : value -> A.elt
val value_to_float : value -> float
val is_arr : value -> bool
val is_elt : value -> bool
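Device boxes raw ndarrays and scalars of the underlying module A into a uniform value type that graph nodes can store. A sketch of the round trip, assuming the functor has been applied to Owl_algodiff_primal_ops.S (any module satisfying the parameter signature would do):

  module Engine = Owl_computation_cpu_engine.Make (Owl_algodiff_primal_ops.S)
  module Device = Engine.Graph.Optimiser.Operator.Symbol.Shape.Type.Device

  let roundtrip x =
    let v = Device.arr_to_value x in   (* box an A.arr as a Device.value *)
    assert (Device.is_arr v);
    Device.value_to_arr v              (* and unbox it again *)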
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html b/owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html deleted file mode 100644 index 0e04d08db..000000000 --- a/owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html +++ /dev/null @@ -1,17 +0,0 @@ - -Type (owl-base.Owl_computation_cpu_engine.Make.Graph.Optimiser.Operator.Symbol.Shape.Type)

Module Shape.Type

module Device : sig ... end
and block = Make_Nested(Owl_computation_engine.Make_Graph(Owl_computation_cpu_device.Make(A))).Graph.Optimiser.Operator.Symbol.Shape.Type.block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}
and attr = Make_Nested(Owl_computation_engine.Make_Graph(Owl_computation_cpu_device.Make(A))).Graph.Optimiser.Operator.Symbol.Shape.Type.attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}
and op = Make_Nested(Owl_computation_engine.Make_Graph(Owl_computation_cpu_device.Make(A))).Graph.Optimiser.Operator.Symbol.Shape.Type.op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
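Every node of the computation graph carries one of the op constructors above, so graph passes are ordinary pattern matches. A sketch of a hypothetical helper that flags the array-creating operations, reusing the Engine alias from the Device sketch:

  module Type = Engine.Graph.Optimiser.Operator.Symbol.Shape.Type

  let allocates_fresh_array = function
    | Type.Empty _ | Type.Zeros _ | Type.Ones _ | Type.Create _
    | Type.Sequential _ | Type.Uniform _ | Type.Gaussian _ | Type.Bernoulli _ -> true
    | _ -> false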
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/index.html b/owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/index.html deleted file mode 100644 index 481a4b9a3..000000000 --- a/owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/Shape/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Shape (owl-base.Owl_computation_cpu_engine.Make.Graph.Optimiser.Operator.Symbol.Shape)

Module Symbol.Shape

module Type : sig ... end
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array
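infer_shape propagates shapes from a node's parents through its op, returning one entry per output (None when a dimension cannot be determined statically). A sketch that prints the result, assuming the Engine alias from the Device sketch:

  module Shape = Engine.Graph.Optimiser.Operator.Symbol.Shape

  let print_inferred op parents =
    Shape.infer_shape op parents
    |> Array.iter (function
         | Some s ->
           s |> Array.to_list |> List.map string_of_int
           |> String.concat "x" |> print_endline
         | None -> print_endline "unknown")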
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/index.html b/owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/index.html deleted file mode 100644 index 447a1730a..000000000 --- a/owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/Symbol/index.html +++ /dev/null @@ -1,28 +0,0 @@ - -Symbol (owl-base.Owl_computation_cpu_engine.Make.Graph.Optimiser.Operator.Symbol)

Module Operator.Symbol

module Shape : sig ... end
val op_to_str : Shape.Type.op -> string
val is_random_variable : Shape.Type.op -> bool
val refnum : 'a Owl_graph.node -> int
val node_shape : Shape.Type.attr Owl_graph.node -> int array
val node_numel : Shape.Type.attr Owl_graph.node -> int
val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool
val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit
val shape_to_str : int array option array -> string
val node_to_str : Shape.Type.attr Owl_graph.node -> string
val node_to_arr : Shape.Type.t -> Shape.Type.arr
val arr_to_node : Shape.Type.arr -> Shape.Type.t
val node_to_elt : Shape.Type.t -> Shape.Type.elt
val elt_to_node : Shape.Type.elt -> Shape.Type.t
val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node
val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node
val var_arr : ?shape:int array -> string -> Shape.Type.arr
val var_elt : string -> Shape.Type.elt
val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr
val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt
val new_block_id : unit -> int
val make_empty_block : ?block_id:int -> int -> Shape.Type.block
val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit
val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit
val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option
val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit
val get_block_id : Shape.Type.attr Owl_graph.node -> int
val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit
val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit
val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit
val get_reuse : Shape.Type.attr Owl_graph.node -> bool
val is_shared : Shape.Type.attr Owl_graph.node -> bool
val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array
val is_var : Shape.Type.attr Owl_graph.node -> bool
val is_const : Shape.Type.attr Owl_graph.node -> bool
val is_node_arr : Shape.Type.attr Owl_graph.node -> bool
val is_node_elt : Shape.Type.attr Owl_graph.node -> bool
val is_assigned : Shape.Type.attr Owl_graph.node -> bool
val check_assigned : Shape.Type.attr Owl_graph.node -> unit
val is_valid : Shape.Type.attr Owl_graph.node -> bool
val validate : Shape.Type.attr Owl_graph.node -> unit
val invalidate : Shape.Type.attr Owl_graph.node -> unit
val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit
val is_freeze : Shape.Type.attr Owl_graph.node -> bool
val freeze : Shape.Type.attr Owl_graph.node -> unit
val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit
val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit
val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit
val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit
val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit
val float_to_elt : float -> Shape.Type.elt
val elt_to_float : Shape.Type.elt -> float
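Symbol is where symbolic placeholders are declared and later bound to concrete values before a graph is evaluated. A minimal sketch, again assuming the Engine instantiation from the Device sketch (so Device.A is Owl_algodiff_primal_ops.S):

  module Symbol = Engine.Graph.Optimiser.Operator.Symbol

  let x = Symbol.var_arr ~shape:[| 2; 2 |] "x"                       (* symbolic variable *)
  let () = Symbol.assign_arr x (Owl_algodiff_primal_ops.S.ones [| 2; 2 |])
  let _c = Symbol.float_to_elt 2.                                    (* symbolic constant *)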
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/index.html b/owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/index.html deleted file mode 100644 index b9361f3e6..000000000 --- a/owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/Operator/index.html +++ /dev/null @@ -1,420 +0,0 @@ - -Operator (owl-base.Owl_computation_cpu_engine.Make.Graph.Optimiser.Operator)

Module Optimiser.Operator

module Symbol : sig ... end
val empty : int array -> Symbol.Shape.Type.arr
val zeros : int array -> Symbol.Shape.Type.arr
val ones : int array -> Symbol.Shape.Type.arr
val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val sequential : ?a:Symbol.Shape.Type.elt -> ?step:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val uniform : ?a:Symbol.Shape.Type.elt -> ?b:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val gaussian : ?mu:Symbol.Shape.Type.elt -> ?sigma:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr
val init_nd : int array -> (int array -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr
val shape : Symbol.Shape.Type.arr -> int array
val numel : Symbol.Shape.Type.arr -> int
val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit
val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val set_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit
val set_fancy : Owl_types.index list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit
val copy_ : out:'a -> 'b -> 'c
val reset : Symbol.Shape.Type.arr -> unit
val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val pad : ?v:Symbol.Shape.Type.elt -> int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr
val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val concatenate : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr
val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr
val split : ?axis:int -> 'a -> 'b -> 'c
val draw : ?axis:int -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr * 'a array
val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit
val min : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val sum : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val sum_reduce : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val elt_less_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val elt_greater_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val dilated_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val dilated_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val dilated_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val avg_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val avg_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val avg_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val upsampling2d_backward : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val row_num : Symbol.Shape.Type.arr -> int
val col_num : Symbol.Shape.Type.arr -> int
val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit
val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit
val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val to_rows : Symbol.Shape.Type.arr -> 'a array
val to_cols : Symbol.Shape.Type.arr -> 'a array
val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr
val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr
val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
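Operator exposes the lazy counterparts of the ndarray API: each call only appends a node to the graph, and nothing is computed until the graph is evaluated. A sketch building a small expression on the symbolic variable x from the Symbol sketch above:

  module Op = Engine.Graph.Optimiser.Operator

  (* pad the symbolic 2x2 variable and reduce over the first axis; both
     calls are recorded in the graph rather than executed *)
  let y = Op.sum ~axis:0 (Op.pad [ [ 1; 1 ]; [ 1; 1 ] ] x)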
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/index.html b/owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/index.html deleted file mode 100644 index 04dd205bd..000000000 --- a/owl-base/Owl_computation_cpu_engine/Make/Graph/Optimiser/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Optimiser (owl-base.Owl_computation_cpu_engine.Make.Graph.Optimiser)

Module Graph.Optimiser

module Operator : sig ... end
val estimate_complexity : 'a Owl_graph.node array -> int * int
val optimise_nodes : Operator.Symbol.Shape.Type.attr Owl_graph.node array -> unit
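The optimiser rewrites a set of terminal nodes in place, fusing and pruning where it can, and can also report a rough complexity estimate. A sketch, with the meaning of the returned pair assumed here to be operation and memory counts:

  module Optimiser = Engine.Graph.Optimiser

  let optimise_and_report nodes =
    Optimiser.optimise_nodes nodes;
    let ops, mem = Optimiser.estimate_complexity nodes in
    Printf.printf "estimated ops = %d, memory = %d\n" ops mem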
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_engine/Make/Graph/index.html b/owl-base/Owl_computation_cpu_engine/Make/Graph/index.html deleted file mode 100644 index 22f045e21..000000000 --- a/owl-base/Owl_computation_cpu_engine/Make/Graph/index.html +++ /dev/null @@ -1,34 +0,0 @@ - -Graph (owl-base.Owl_computation_cpu_engine.Make.Graph)

Module Make.Graph

module Optimiser : sig ... end
val shape_or_value : Optimiser.Operator.Symbol.Shape.Type.t -> string
val graph_to_dot : graph -> string
val graph_to_trace : graph -> string
val save_graph : 'a -> string -> unit
val load_graph : string -> 'a * 'b
val invalidate_rvs : graph -> unit
val is_iopair_safe : 'a Owl_graph.node -> 'a Owl_graph.node -> bool
val update_iopair : graph -> unit
val remove_unused_iopair : 'a Owl_graph.node array -> 'b array -> 'a Owl_graph.node array * 'b array
val optimise : graph -> unit
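Graph bundles the optimiser with serialisation helpers; graph_to_dot is handy for eyeballing what the optimiser did. A sketch (construction of the graph value itself, e.g. via make_graph, lives in the part of the signature elided above and is assumed here):

  let dump_dot filename g =
    Engine.Graph.optimise g;
    let oc = open_out filename in
    output_string oc (Engine.Graph.graph_to_dot g);
    close_out oc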
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_engine/Make/argument-1-A/Linalg/index.html b/owl-base/Owl_computation_cpu_engine/Make/argument-1-A/Linalg/index.html deleted file mode 100644 index 3db76c51c..000000000 --- a/owl-base/Owl_computation_cpu_engine/Make/argument-1-A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_computation_cpu_engine.Make.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
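The functor argument must also ship a Linalg sub-module with these routines. A sketch on a concrete instance, assuming Owl_algodiff_primal_ops.S from the owl package provides it:

  module A = Owl_algodiff_primal_ops.S

  let () =
    let a = A.uniform [| 4; 4 |] in
    let b = A.uniform [| 4; 1 |] in
    let x = A.Linalg.linsolve a b in    (* solve a * x = b *)
    A.print (A.sub (A.dot a x) b)       (* residual, close to zero *)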
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_engine/Make/argument-1-A/Mat/index.html b/owl-base/Owl_computation_cpu_engine/Make/argument-1-A/Mat/index.html deleted file mode 100644 index 1fe44100d..000000000 --- a/owl-base/Owl_computation_cpu_engine/Make/argument-1-A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_cpu_engine.Make.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_engine/Make/argument-1-A/Scalar/index.html b/owl-base/Owl_computation_cpu_engine/Make/argument-1-A/Scalar/index.html deleted file mode 100644 index c0797e9df..000000000 --- a/owl-base/Owl_computation_cpu_engine/Make/argument-1-A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_computation_cpu_engine.Make.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
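Scalar mirrors the element-wise functions at the elt level, which the engines and algodiff use for scalar nodes. On a float-based instance (assumed) elt is simply float:

  let () =
    let open Owl_algodiff_primal_ops.S.Scalar in
    Printf.printf "%g\n" (sigmoid (add 1. 2.))   (* sigmoid of 3.0 *)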
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_engine/Make/argument-1-A/index.html b/owl-base/Owl_computation_cpu_engine/Make/argument-1-A/index.html deleted file mode 100644 index 9adc1b9ec..000000000 --- a/owl-base/Owl_computation_cpu_engine/Make/argument-1-A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_computation_cpu_engine.Make.A)

Parameter Make.A

include Owl_types_ndarray_mutable.Sig
include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
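Any module satisfying this parameter signature can drive the CPU engine. A minimal sketch of the usual instantiation, restating the assumption used in the sketches above (Owl_algodiff_primal_ops.S lives in the owl package rather than owl-base) and layering algodiff on top, a pattern the Owl documentation suggests but which is assumed here:

  module Engine = Owl_computation_cpu_engine.Make (Owl_algodiff_primal_ops.S)

  (* the lazy engine itself exposes the algodiff array interface, so it can
     in turn be fed to Owl_algodiff_generic.Make for symbolic AD *)
  module AD = Owl_algodiff_generic.Make (Engine)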
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_engine/Make/index.html b/owl-base/Owl_computation_cpu_engine/Make/index.html deleted file mode 100644 index 87cf03755..000000000 --- a/owl-base/Owl_computation_cpu_engine/Make/index.html +++ /dev/null @@ -1,500 +0,0 @@ - -Make (owl-base.Owl_computation_cpu_engine.Make)

Module Owl_computation_cpu_engine.Make

Parameters

Signature

include sig ... end
module Graph : sig ... end
val eval_graph : Graph.graph -> unit
module Optimiser = Graph.Optimiser
val shape_or_value : Optimiser.Operator.Symbol.Shape.Type.t -> string
val graph_to_dot : graph -> string
val graph_to_trace : graph -> string
val save_graph : 'a -> string -> unit
val load_graph : string -> 'a * 'b
val invalidate_rvs : graph -> unit
val is_iopair_safe : 'a Owl_graph.node -> 'a Owl_graph.node -> bool
val update_iopair : graph -> unit
val remove_unused_iopair : 'a Owl_graph.node array -> 'b array -> 'a Owl_graph.node array * 'b array
val optimise : graph -> unit
module Operator = Graph.Optimiser.Operator
val estimate_complexity : 'a Owl_graph.node array -> int * int
val optimise_nodes : Operator.Symbol.Shape.Type.attr Owl_graph.node array -> unit
val empty : int array -> Symbol.Shape.Type.arr
val zeros : int array -> Symbol.Shape.Type.arr
val ones : int array -> Symbol.Shape.Type.arr
val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val sequential : ?a:Symbol.Shape.Type.elt -> ?step:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val uniform : ?a:Symbol.Shape.Type.elt -> ?b:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val gaussian : ?mu:Symbol.Shape.Type.elt -> ?sigma:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr
val init_nd : int array -> (int array -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr
val shape : Symbol.Shape.Type.arr -> int array
val numel : Symbol.Shape.Type.arr -> int
val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit
val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val set_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit
val set_fancy : Owl_types.index list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit
val copy_ : out:'a -> 'b -> 'c
val reset : Symbol.Shape.Type.arr -> unit
val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val pad : ?v:Symbol.Shape.Type.elt -> int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr
val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val concatenate : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr
val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr
val split : ?axis:int -> 'a -> 'b -> 'c
val draw : ?axis:int -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr * 'a array
val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit
val min : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val sum : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val sum_reduce : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val elt_less_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val elt_greater_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val dilated_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val dilated_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val dilated_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val transpose_conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val transpose_conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val transpose_conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val transpose_conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val transpose_conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val transpose_conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val dilated_conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val dilated_conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val dilated_conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val dilated_conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val dilated_conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val dilated_conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val max_pool1d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val max_pool2d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val max_pool3d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val avg_pool1d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val avg_pool2d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val avg_pool3d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val upsampling2d_backward : - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr
val row_num : Symbol.Shape.Type.arr -> int
val col_num : Symbol.Shape.Type.arr -> int
val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit
val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit
val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val to_rows : Symbol.Shape.Type.arr -> 'a array
val to_cols : Symbol.Shape.Type.arr -> 'a array
val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr
val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr
val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array
val op_to_str : Shape.Type.op -> string
val is_random_variable : Shape.Type.op -> bool
val refnum : 'a Owl_graph.node -> int
val node_shape : Shape.Type.attr Owl_graph.node -> int array
val node_numel : Shape.Type.attr Owl_graph.node -> int
val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool
val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit
val shape_to_str : int array option array -> string
val node_to_str : Shape.Type.attr Owl_graph.node -> string
val node_to_arr : Shape.Type.t -> Shape.Type.arr
val arr_to_node : Shape.Type.arr -> Shape.Type.t
val node_to_elt : Shape.Type.t -> Shape.Type.elt
val elt_to_node : Shape.Type.elt -> Shape.Type.t
val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node
val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node
val var_arr : ?shape:int array -> string -> Shape.Type.arr
val var_elt : string -> Shape.Type.elt
val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr
val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt
val new_block_id : unit -> int
val make_empty_block : ?block_id:int -> int -> Shape.Type.block
val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit
val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit
val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option
val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit
val get_block_id : Shape.Type.attr Owl_graph.node -> int
val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit
val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit
val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit
val get_reuse : Shape.Type.attr Owl_graph.node -> bool
val is_shared : Shape.Type.attr Owl_graph.node -> bool
val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array
val is_var : Shape.Type.attr Owl_graph.node -> bool
val is_const : Shape.Type.attr Owl_graph.node -> bool
val is_node_arr : Shape.Type.attr Owl_graph.node -> bool
val is_node_elt : Shape.Type.attr Owl_graph.node -> bool
val is_assigned : Shape.Type.attr Owl_graph.node -> bool
val check_assigned : Shape.Type.attr Owl_graph.node -> unit
val is_valid : Shape.Type.attr Owl_graph.node -> bool
val validate : Shape.Type.attr Owl_graph.node -> unit
val invalidate : Shape.Type.attr Owl_graph.node -> unit
val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit
val is_freeze : Shape.Type.attr Owl_graph.node -> bool
val freeze : Shape.Type.attr Owl_graph.node -> unit
val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit
val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit
val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit
val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit
val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit
val float_to_elt : float -> Shape.Type.elt
val elt_to_float : Shape.Type.elt -> float
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array
and block = Make_Nested(Owl_computation_engine.Make_Graph(Owl_computation_cpu_device.Make(A))).Graph.Optimiser.Operator.Symbol.Shape.Type.block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}
and attr = Make_Nested(Owl_computation_engine.Make_Graph(Owl_computation_cpu_device.Make(A))).Graph.Optimiser.Operator.Symbol.Shape.Type.attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}
and op = Make_Nested(Owl_computation_engine.Make_Graph(Owl_computation_cpu_device.Make(A))).Graph.Optimiser.Operator.Symbol.Shape.Type.op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
val make_device : unit -> device
val arr_to_value : A.arr -> value
val value_to_arr : value -> A.arr
val elt_to_value : A.elt -> value
val value_to_elt : value -> A.elt
val value_to_float : value -> float
val is_arr : value -> bool
val is_elt : value -> bool
\ No newline at end of file
diff --git a/owl-base/Owl_computation_cpu_engine/Make_Nested/CG_Eval/index.html b/owl-base/Owl_computation_cpu_engine/Make_Nested/CG_Eval/index.html
deleted file mode 100644
index bcf385d32..000000000
--- a/owl-base/Owl_computation_cpu_engine/Make_Nested/CG_Eval/index.html
+++ /dev/null
@@ -1,57 +0,0 @@
CG_Eval (owl-base.Owl_computation_cpu_engine.Make_Nested.CG_Eval)

Module Make_Nested.CG_Eval

val invalidate_opt : Graph.Optimiser.Operator.Symbol.Shape.Type.attr Owl_graph.node option -> unit
\ No newline at end of file
diff --git a/owl-base/Owl_computation_cpu_engine/Make_Nested/CG_Init/MultiMap/index.html b/owl-base/Owl_computation_cpu_engine/Make_Nested/CG_Init/MultiMap/index.html
deleted file mode 100644
index e0d8662d2..000000000
--- a/owl-base/Owl_computation_cpu_engine/Make_Nested/CG_Init/MultiMap/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
MultiMap (owl-base.Owl_computation_cpu_engine.Make_Nested.CG_Init.MultiMap)

Module CG_Init.MultiMap

type key = int
val empty : 'a t
val is_empty : 'a t -> bool
val mem : key -> 'a t -> bool
val add : key -> 'a -> 'a t -> 'a t
val remove : key -> 'a t -> 'a t
val find : key -> 'a t -> 'a
val max_binding : 'a t -> key * 'a
val find_first_opt : (key -> bool) -> 'a t -> (key * 'a) option
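The signatures above follow the standard Map.S interface over int keys. As a minimal usage sketch (assuming this CG_Init.MultiMap module is in scope and behaves as its signatures suggest; the int list payload is purely illustrative, since the map is polymorphic in 'a):

    let m =
      MultiMap.empty
      |> MultiMap.add 3 [ 30 ]
      |> MultiMap.add 7 [ 70; 71 ]

    let () =
      assert (MultiMap.mem 3 m);
      (* max_binding returns the binding with the largest key, here 7 *)
      let kmax, _ = MultiMap.max_binding m in
      (* find_first_opt returns the first binding whose key satisfies the predicate *)
      (match MultiMap.find_first_opt (fun k -> k >= 5) m with
       | Some (k, vs) -> Printf.printf "kmax = %d, first key >= 5 is %d with %d items\n" kmax k (List.length vs)
       | None -> print_endline "no key >= 5");
      assert (MultiMap.is_empty (MultiMap.remove 3 (MultiMap.remove 7 m)))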
\ No newline at end of file
diff --git a/owl-base/Owl_computation_cpu_engine/Make_Nested/CG_Init/index.html b/owl-base/Owl_computation_cpu_engine/Make_Nested/CG_Init/index.html
deleted file mode 100644
index f6da68d7e..000000000
--- a/owl-base/Owl_computation_cpu_engine/Make_Nested/CG_Init/index.html
+++ /dev/null
@@ -1,13 +0,0 @@
CG_Init (owl-base.Owl_computation_cpu_engine.Make_Nested.CG_Init)

Module Make_Nested.CG_Init

\ No newline at end of file
diff --git a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Linalg/index.html b/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Linalg/index.html
deleted file mode 100644
index 2014dcac9..000000000
--- a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Linalg/index.html
+++ /dev/null
@@ -1,33 +0,0 @@
Linalg (owl-base.Owl_computation_cpu_engine.Make_Nested.Graph.Optimiser.Operator.Linalg)

Module Operator.Linalg

\ No newline at end of file
diff --git a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Mat/index.html b/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Mat/index.html
deleted file mode 100644
index 06dd89537..000000000
--- a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Mat/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Mat (owl-base.Owl_computation_cpu_engine.Make_Nested.Graph.Optimiser.Operator.Mat)

Module Operator.Mat

val eye : int -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

\ No newline at end of file
diff --git a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Scalar/index.html b/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Scalar/index.html
deleted file mode 100644
index 97d7fc2c6..000000000
--- a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Scalar/index.html
+++ /dev/null
@@ -1,20 +0,0 @@
Scalar (owl-base.Owl_computation_cpu_engine.Make_Nested.Graph.Optimiser.Operator.Scalar)

Module Operator.Scalar

\ No newline at end of file
diff --git a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html
deleted file mode 100644
index 8b2f7d8a6..000000000
--- a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html
+++ /dev/null
@@ -1,6 +0,0 @@
Linalg (owl-base.Owl_computation_cpu_engine.Make_Nested.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file
diff --git a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html
deleted file mode 100644
index 09e7ac209..000000000
--- a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Mat (owl-base.Owl_computation_cpu_engine.Make_Nested.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file
diff --git a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html
deleted file mode 100644
index c891dd141..000000000
--- a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Scalar (owl-base.Owl_computation_cpu_engine.Make_Nested.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file
diff --git a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html b/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html
deleted file mode 100644
index e95b4d262..000000000
--- a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html
+++ /dev/null
@@ -1,379 +0,0 @@
A (owl-base.Owl_computation_cpu_engine.Make_Nested.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file
diff --git a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html b/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html
deleted file mode 100644
index 3bb779278..000000000
--- a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Device (owl-base.Owl_computation_cpu_engine.Make_Nested.Graph.Optimiser.Operator.Symbol.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO
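Taken together, these functions suggest that a value wraps either an ndarray or a scalar element of the A module. As a rough sketch of how the conversion functions compose (assuming a module Device matching this signature is in scope, for instance the one produced by Owl_computation_cpu_device.Make; the helper names describe_value and ones_value are hypothetical):

    (* Inspect a Device.value without knowing in advance what it wraps. *)
    let describe_value (v : Device.value) =
      if Device.is_arr v then
        Printf.printf "ndarray value with %d elements\n" (Device.A.numel (Device.value_to_arr v))
      else if Device.is_elt v then
        Printf.printf "scalar value %g\n" (Device.value_to_float v)
      else
        print_endline "uninitialised value"

    (* Wrapping goes in the other direction. *)
    let ones_value shape = Device.arr_to_value (Device.A.ones shape)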

\ No newline at end of file
diff --git a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/index.html b/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/index.html
deleted file mode 100644
index 1477c88b5..000000000
--- a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/index.html
+++ /dev/null
@@ -1,5 +0,0 @@
Type (owl-base.Owl_computation_cpu_engine.Make_Nested.Graph.Optimiser.Operator.Symbol.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

block type keeps a reference to a block of memory and to the nodes sharing that block.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
\ No newline at end of file
diff --git a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/index.html b/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/index.html
deleted file mode 100644
index 582654a66..000000000
--- a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/Shape/index.html
+++ /dev/null
@@ -1,5 +0,0 @@
Shape (owl-base.Owl_computation_cpu_engine.Make_Nested.Graph.Optimiser.Operator.Symbol.Shape)

Module Symbol.Shape

Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO

\ No newline at end of file
diff --git a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/index.html b/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/index.html
deleted file mode 100644
index fc8606bb0..000000000
--- a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/Symbol/index.html
+++ /dev/null
@@ -1,28 +0,0 @@
Symbol (owl-base.Owl_computation_cpu_engine.Make_Nested.Graph.Optimiser.Operator.Symbol)

Module Operator.Symbol

Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.

val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.
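As a minimal sketch of the allocation side of this API (assuming this Symbol module is open and x is a Shape.Type.attr Owl_graph.node already in scope; the helper name fresh_block_for is hypothetical):

    (* Allocate a fresh, empty memory block sized for node [x]. *)
    let fresh_block_for x =
      let id = new_block_id () in                  (* an unused block id *)
      make_empty_block ~block_id:id (node_numel x)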

val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit

Links a node to a reusable block and initialises the node's memory using the memory of that block.

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Return the node that is currently using the memory of the block.

val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit

Update the node that is currently using the block of memory.

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.
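Continuing the sketch above, linking a node to a block and checking the bookkeeping might look as follows (x and blk are the assumed node and block from the previous example; the helper name attach is hypothetical):

    let attach x blk =
      add_node_to_block x blk;          (* link the node and initialise its memory *)
      set_active_node blk x;            (* x now uses the block's memory *)
      assert (get_block_id x >= 0);     (* -1 would mean "not assigned" *)
      match get_active_node blk with
      | Some n -> assert (n == x)
      | None   -> assert false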

val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.
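For example, a small (hypothetical) helper reporting how many nodes currently share a node's block could be written as:

    let sharing_degree x =
      if is_shared x then Array.length (get_shared_nodes x)
      else 1  (* a node that does not share its block forms a group of one *)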

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.
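A typical guard built on these two functions might look like the following sketch (the helper names are hypothetical):

    (* [check_assigned] raises if no block has been assigned, so [f] only ever
       sees nodes with memory attached. *)
    let with_assigned x f =
      check_assigned x;
      f x

    (* A non-raising variant based on [is_assigned]. *)
    let try_with_assigned x f =
      if is_assigned x then Some (f x) else None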

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO

\ No newline at end of file
diff --git a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/index.html b/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/index.html
deleted file mode 100644
index 5d31d0d27..000000000
--- a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/Operator/index.html
+++ /dev/null
@@ -1,420 +0,0 @@
Operator (owl-base.Owl_computation_cpu_engine.Make_Nested.Graph.Optimiser.Operator)

Module Optimiser.Operator

Vectorised functions
val empty : int array -> Symbol.Shape.Type.arr

TODO

val zeros : int array -> Symbol.Shape.Type.arr

TODO

val ones : int array -> Symbol.Shape.Type.arr

TODO

val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val sequential : ?a:Symbol.Shape.Type.elt -> ?step:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val uniform : ?a:Symbol.Shape.Type.elt -> ?b:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val gaussian : ?mu:Symbol.Shape.Type.elt -> ?sigma:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val init_nd : int array -> (int array -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val shape : Symbol.Shape.Type.arr -> int array

TODO

val numel : Symbol.Shape.Type.arr -> int

TODO

TODO

val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit

TODO

val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val set_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

TODO

val set_fancy : Owl_types.index list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

val copy_ : out:'a -> 'b -> 'c

TODO

val reset : Symbol.Shape.Type.arr -> unit

TODO

val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val pad : ?v:Symbol.Shape.Type.elt -> int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr

TODO

val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val concatenate : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

TODO

val split : ?axis:int -> 'a -> 'b -> 'c

TODO

val draw : ?axis:int -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr * 'a array

TODO

TODO

delay f x returns f x. It allows one to use a function that is not tracked by the computation graph and to delay its evaluation. The output must have the same shape as the input.

delay_array out_shape f x works in the same way as delay but is applied to an array of ndarrays. The shape of the output must be given as an argument.
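
A minimal usage sketch, assuming Op is an instantiation of this Operator module, A abbreviates its device ndarray module (Op.Symbol.Shape.Type.Device.A), and delay has the type suggested by the description, (A.arr -> A.arr) -> arr -> arr:

module A = Op.Symbol.Shape.Type.Device.A

let x = Op.Symbol.var_arr ~shape:[| 3; 3 |] "x"

(* [A.sqr] runs outside the graph when [y] is evaluated; the graph only
   records a single opaque Delay node, and the shape is preserved *)
let y = Op.delay A.sqr x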

val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

lazy_print x prints the output of x when it is evaluated. It is implemented as an identity node. For information about the optional parameters, refer to the print function of the Ndarray module.
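
Because lazy_print behaves as an identity, it can be spliced into the middle of a lazy expression. A sketch, again assuming Op instantiates this Operator module:

let z =
  Op.Symbol.var_arr ~shape:[| 2; 2 |] "z"
  |> Op.lazy_print ~header:true (* printed when the graph is evaluated *)
  |> Op.sum ~axis:0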

val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit

TODO

val min : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum_reduce : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

TODO

val elt_less_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val elt_greater_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val upsampling2d_backward : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val row_num : Symbol.Shape.Type.arr -> int

TODO

val col_num : Symbol.Shape.Type.arr -> int

TODO

val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

TODO

val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val to_rows : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val to_cols : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr

TODO

val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr

TODO

val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array

TODO

Scalar functions
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/index.html b/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/index.html deleted file mode 100644 index aa8ab2ca1..000000000 --- a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/Optimiser/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Optimiser (owl-base.Owl_computation_cpu_engine.Make_Nested.Graph.Optimiser)

Module Graph.Optimiser

Core functions
val estimate_complexity : 'a Owl_graph.node array -> int * int

TODO

val optimise_nodes : Operator.Symbol.Shape.Type.attr Owl_graph.node array -> unit

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/index.html b/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/index.html deleted file mode 100644 index 46703b0ff..000000000 --- a/owl-base/Owl_computation_cpu_engine/Make_Nested/argument-1-Graph/index.html +++ /dev/null @@ -1,33 +0,0 @@ - -Graph (owl-base.Owl_computation_cpu_engine.Make_Nested.Graph)

Parameter Make_Nested.Graph

Type definition
type graph

TODO

Core functions
val shape_or_value : Optimiser.Operator.Symbol.Shape.Type.t -> string

TODO

val graph_to_dot : graph -> string

TODO

val graph_to_trace : graph -> string

TODO

val save_graph : 'a -> string -> unit

TODO

val load_graph : string -> 'a * 'b

TODO

val invalidate_rvs : graph -> unit

TODO

val is_iopair_safe : 'a Owl_graph.node -> 'a Owl_graph.node -> bool

TODO

val update_iopair : graph -> unit

TODO

val remove_unused_iopair : 'a Owl_graph.node array -> 'b array -> 'a Owl_graph.node array * 'b array

TODO

val optimise : graph -> unit

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_engine/Make_Nested/index.html b/owl-base/Owl_computation_cpu_engine/Make_Nested/index.html deleted file mode 100644 index 0d1559d7b..000000000 --- a/owl-base/Owl_computation_cpu_engine/Make_Nested/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Make_Nested (owl-base.Owl_computation_cpu_engine.Make_Nested)

Module Owl_computation_cpu_engine.Make_Nested

Parameters

Signature

module Graph = Graph
module CG_Init : sig ... end
module CG_Eval : sig ... end
val eval_graph : Graph.graph -> unit
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_eval/.dummy b/owl-base/Owl_computation_cpu_eval/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Linalg/index.html b/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Linalg/index.html deleted file mode 100644 index f06a1aef3..000000000 --- a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Linalg/index.html +++ /dev/null @@ -1,33 +0,0 @@ - -Linalg (owl-base.Owl_computation_cpu_eval.Make.Graph.Optimiser.Operator.Linalg)

Module Operator.Linalg

\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Mat/index.html b/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Mat/index.html deleted file mode 100644 index 627c3027c..000000000 --- a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_cpu_eval.Make.Graph.Optimiser.Operator.Mat)

Module Operator.Mat

val eye : int -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Scalar/index.html b/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Scalar/index.html deleted file mode 100644 index bc62eb997..000000000 --- a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Scalar/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -Scalar (owl-base.Owl_computation_cpu_eval.Make.Graph.Optimiser.Operator.Scalar)

Module Operator.Scalar

\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html deleted file mode 100644 index f72cd4691..000000000 --- a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_computation_cpu_eval.Make.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html deleted file mode 100644 index fc5c44469..000000000 --- a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_cpu_eval.Make.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html deleted file mode 100644 index 6460e95bf..000000000 --- a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_computation_cpu_eval.Make.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html b/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html deleted file mode 100644 index 9bbed3fa3..000000000 --- a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_computation_cpu_eval.Make.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val dilated_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val transpose_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val max_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv1d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : - ?transa:bool -> - ?transb:bool -> - ?alpha:elt -> - ?beta:elt -> - c:arr -> - arr -> - arr -> - unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val conv2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val conv3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val dilated_conv1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - unit
val dilated_conv2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - unit
val dilated_conv3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - unit
val transpose_conv1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val transpose_conv2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val transpose_conv3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val max_pool1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val max_pool2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val max_pool3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val avg_pool1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val avg_pool2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val avg_pool3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv1d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv2d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv2d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv3d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv3d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val transpose_conv1d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv1d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv2d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv2d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv3d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv3d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val max_pool1d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val max_pool2d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val max_pool3d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val avg_pool1d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val avg_pool2d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val avg_pool3d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html b/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html deleted file mode 100644 index 9a1874888..000000000 --- a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Device (owl-base.Owl_computation_cpu_eval.Make.Graph.Optimiser.Operator.Symbol.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/index.html b/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/index.html deleted file mode 100644 index 1a0ad7b03..000000000 --- a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/Type/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Type (owl-base.Owl_computation_cpu_eval.Make.Graph.Optimiser.Operator.Symbol.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

block type keeps a reference to a block of memory and to the nodes sharing that block.
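
A sketch of reading these fields, assuming T is bound to this Type module (e.g. module T = ...Shape.Type); the active field, when present, is the node whose value currently resides in memory.

let occupancy { T.block_id; size; nodes; active; _ } =
  Printf.printf "block %d: %d element(s), %d sharing node(s)%s\n"
    block_id size (List.length nodes)
    (match active with Some _ -> ", in use" | None -> ", free")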

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/index.html b/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/index.html deleted file mode 100644 index 5531853f7..000000000 --- a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/Shape/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Shape (owl-base.Owl_computation_cpu_eval.Make.Graph.Optimiser.Operator.Symbol.Shape)

Module Symbol.Shape

Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/index.html b/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/index.html deleted file mode 100644 index 3c0de9c6b..000000000 --- a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/Symbol/index.html +++ /dev/null @@ -1,28 +0,0 @@ - -Symbol (owl-base.Owl_computation_cpu_eval.Make.Graph.Optimiser.Operator.Symbol)

Module Operator.Symbol

Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.

val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.

val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit

Links a node to a reusable block and initialises the node's memory on the memory of that block.

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Return the node that is currently using the memory of the block.

val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit

Update the node that is currently using the block of memory.

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.
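
make_value_block is the direct way to back a node with concrete memory. A sketch, assuming Symbol instantiates this module and x is an existing Shape.Type.Device.A.arr of the node's size:

let attach_value node x =
  let v = Symbol.Shape.Type.Device.arr_to_value x in
  Symbol.make_value_block v node;
  (* the node is now backed by the freshly created block *)
  assert (Symbol.get_block_id node <> -1)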

val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/index.html b/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/index.html deleted file mode 100644 index d633e06dd..000000000 --- a/owl-base/Owl_computation_cpu_eval/Make/argument-1-Graph/Optimiser/Operator/index.html +++ /dev/null @@ -1,420 +0,0 @@ - -Operator (owl-base.Owl_computation_cpu_eval.Make.Graph.Optimiser.Operator)

Module Optimiser.Operator

Vectorised functions
val empty : int array -> Symbol.Shape.Type.arr

TODO

val zeros : int array -> Symbol.Shape.Type.arr

TODO

val ones : int array -> Symbol.Shape.Type.arr

TODO

val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val sequential : - ?a:Symbol.Shape.Type.elt -> - ?step:Symbol.Shape.Type.elt -> - int array -> - Symbol.Shape.Type.arr

TODO

val uniform : - ?a:Symbol.Shape.Type.elt -> - ?b:Symbol.Shape.Type.elt -> - int array -> - Symbol.Shape.Type.arr

TODO

val gaussian : - ?mu:Symbol.Shape.Type.elt -> - ?sigma:Symbol.Shape.Type.elt -> - int array -> - Symbol.Shape.Type.arr

TODO

val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val init_nd : - int array -> - (int array -> Symbol.Shape.Type.elt) -> - Symbol.Shape.Type.arr

TODO

val shape : Symbol.Shape.Type.arr -> int array

TODO

val numel : Symbol.Shape.Type.arr -> int

TODO

TODO

val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit

TODO

val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val set_slice : - int list list -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - unit

TODO

TODO

val set_fancy : - Owl_types.index list -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - unit

TODO

val copy_ : out:'a -> 'b -> 'c

TODO

val reset : Symbol.Shape.Type.arr -> unit

TODO

val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val pad : - ?v:Symbol.Shape.Type.elt -> - int list list -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr

TODO

val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val concatenate : - ?axis:int -> - Symbol.Shape.Type.arr array -> - Symbol.Shape.Type.arr

TODO

val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

TODO

val split : ?axis:int -> 'a -> 'b -> 'c

TODO

val draw : - ?axis:int -> - Symbol.Shape.Type.arr -> - int -> - Symbol.Shape.Type.arr * 'a array

TODO

TODO

delay f x returns f x. It allows one to use a function that is not tracked by the computation graph and to delay its evaluation. The output must have the same shape as the input.

delay_array out_shape f x works in the same way as delay but is applied to an array of ndarrays. The shape of the output must be given as an argument.
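
A sketch of delay_array, assuming Op instantiates this Operator module, A abbreviates Op.Symbol.Shape.Type.Device.A, and the argument order is the one given in the description (output shape, untracked function, inputs); the signature itself is an assumption here.

module A = Op.Symbol.Shape.Type.Device.A

let a = Op.Symbol.var_arr ~shape:[| 4 |] "a"
let b = Op.Symbol.var_arr ~shape:[| 4 |] "b"

(* the element-wise sum is computed outside the graph at evaluation time *)
let c = Op.delay_array [| 4 |] (fun xs -> A.add xs.(0) xs.(1)) [| a; b |]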

val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

lazy_print x prints the output of x when it is evaluated. It is implemented as an identity node. For information about the optional parameters, refer to the print function of the Ndarray module.

val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit

TODO

val min : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val sum : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val sum_reduce : - ?axis:int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val log_sum_exp : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

TODO

val elt_less_equal_scalar : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.elt -> - Symbol.Shape.Type.arr

TODO

val elt_greater_equal_scalar : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.elt -> - Symbol.Shape.Type.arr

TODO

val conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val upsampling2d_backward : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val row_num : Symbol.Shape.Type.arr -> int

TODO

val col_num : Symbol.Shape.Type.arr -> int

TODO

val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

TODO

val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val to_rows : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val to_cols : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr

TODO

val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr

TODO

val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array

TODO

Scalar functions
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
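To make the listing above concrete, here is a small hedged sketch that chains several of these graph operations. It assumes the enclosing functor has been applied and the resulting module is aliased as Op; the shapes are purely illustrative.

(* Hypothetical sketch: build a small lazy expression from the operators above. *)
let pipeline (x : Op.Symbol.Shape.Type.arr) (y : Op.Symbol.Shape.Type.arr) =
  let x = Op.pad [ [ 1; 1 ]; [ 0; 0 ] ] x in      (* pad both sides of the first axis *)
  let z = Op.concatenate ~axis:0 [| x; y |] in    (* inputs must agree on the remaining axes *)
  Op.sum ~axis:0 (Op.transpose z)                 (* nothing is evaluated until the graph runs *)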
Optimiser (owl-base.Owl_computation_cpu_eval.Make.Graph.Optimiser)

Module Graph.Optimiser

Core functions
val estimate_complexity : 'a Owl_graph.node array -> int * int

TODO

val optimise_nodes : Operator.Symbol.Shape.Type.attr Owl_graph.node array -> unit

TODO

Graph (owl-base.Owl_computation_cpu_eval.Make.Graph)

Parameter Make.Graph

Type definition
type graph

TODO

Core functions
val shape_or_value : Optimiser.Operator.Symbol.Shape.Type.t -> string

TODO

val graph_to_dot : graph -> string

TODO

val graph_to_trace : graph -> string

TODO

val save_graph : 'a -> string -> unit

TODO

val load_graph : string -> 'a * 'b

TODO

val invalidate_rvs : graph -> unit

TODO

val is_iopair_safe : 'a Owl_graph.node -> 'a Owl_graph.node -> bool

TODO

val update_iopair : graph -> unit

TODO

val remove_unused_iopair : 'a Owl_graph.node array -> 'b array -> 'a Owl_graph.node array * 'b array

TODO

val optimise : graph -> unit

TODO

Make (owl-base.Owl_computation_cpu_eval.Make)

Module Owl_computation_cpu_eval.Make

Parameters

Signature

val invalidate_opt : Graph.Optimiser.Operator.Symbol.Shape.Type.attr Owl_graph.node option -> unit
MultiMap (owl-base.Owl_computation_cpu_init.Make.MultiMap)

Module Make.MultiMap

type key = int
type 'a t
val empty : 'a t
val is_empty : 'a t -> bool
val mem : key -> 'a t -> bool
val add : key -> 'a -> 'a t -> 'a t
val remove : key -> 'a t -> 'a t
val find : key -> 'a t -> 'a
val max_binding : 'a t -> key * 'a
val find_first_opt : (key -> bool) -> 'a t -> (key * 'a) option
Linalg (owl-base.Owl_computation_cpu_init.Make.Graph.Optimiser.Operator.Linalg)

Module Operator.Linalg

Mat (owl-base.Owl_computation_cpu_init.Make.Graph.Optimiser.Operator.Mat)

Module Operator.Mat

val eye : int -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

Scalar (owl-base.Owl_computation_cpu_init.Make.Graph.Optimiser.Operator.Scalar)

Module Operator.Scalar

Linalg (owl-base.Owl_computation_cpu_init.Make.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
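As a hedged illustration of these signatures (assuming this Linalg module is in scope as Linalg and the matrices have compatible shapes), a linear system can be solved directly or by exploiting a known triangular structure:

(* Hypothetical sketch built only on the signatures listed above. *)
let solve a b = Linalg.linsolve a b                  (* general A * x = b *)
let solve_upper r b = Linalg.linsolve ~typ:`u r b    (* r is assumed upper triangular *)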
Mat (owl-base.Owl_computation_cpu_init.Make.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
Scalar (owl-base.Owl_computation_cpu_init.Make.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
A (owl-base.Owl_computation_cpu_init.Make.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
Device (owl-base.Owl_computation_cpu_init.Make.Graph.Optimiser.Operator.Symbol.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

Type (owl-base.Owl_computation_cpu_init.Make.Graph.Optimiser.Operator.Symbol.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

The block type keeps a reference to a block of memory and to the nodes sharing that block.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * int -> elt
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of elt -> elt
  31. | Fold of int * elt -> elt -> elt
  32. | Scan of int * elt -> elt -> elt
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of Device.A.arr -> Device.A.arr
  36. | DelayArray of int array * Device.A.arr array -> Device.A.arr
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
Shape (owl-base.Owl_computation_cpu_init.Make.Graph.Optimiser.Operator.Symbol.Shape)

Module Symbol.Shape

Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO

Symbol (owl-base.Owl_computation_cpu_init.Make.Graph.Optimiser.Operator.Symbol)

Module Operator.Symbol

Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.

val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.
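A hedged sketch of how this might be used (assuming this Symbol module is in scope as Symbol and that the raw ndarray has the shape expected by the node): the value is first lifted into a device value with arr_to_value, then attached to the node as a fresh block.

(* Hypothetical sketch: back a node with memory initialised from a raw ndarray. *)
let init_with_ndarray node (raw : Symbol.Shape.Type.Device.A.arr) =
  let v = Symbol.Shape.Type.Device.arr_to_value raw in
  Symbol.make_value_block v node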

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.

val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit

Links a node to a reusable block and initialises the node's memory on the block's memory.

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Return the node that is currently using the memory of the block.

val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit

Update the node that is currently using the block of memory.
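A minimal sketch of the hand-over pattern these two functions suggest (assuming Symbol is this module): the previous owner of the block is recorded before the memory is given to a new node.

(* Hypothetical sketch: transfer ownership of a block's memory to [node]. *)
let swap_owner block node =
  let previous = Symbol.get_active_node block in
  Symbol.set_active_node block node;
  previous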

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.
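Since get_block_id returns -1 for unassigned nodes, it can guard a lazy allocation, as in this hedged sketch (again assuming Symbol is this module; using node_numel as the block size is an illustrative choice, not a documented rule).

(* Hypothetical sketch: allocate a block for a node only if it has none yet. *)
let ensure_assigned node =
  if Symbol.get_block_id node = -1 then begin
    let block = Symbol.make_empty_block (Symbol.node_numel node) in
    Symbol.add_node_to_block node block
  end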

val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.
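For instance, a hedged sketch combining this with is_shared and node_to_str from above (assuming Symbol is this module):

(* Hypothetical sketch: list every node that reuses the same block as [node]. *)
let log_sharing node =
  if Symbol.is_shared node then
    Array.iter
      (fun n -> print_endline (Symbol.node_to_str n))
      (Symbol.get_shared_nodes node)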

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.
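A hedged one-liner showing how the two checks compose (assuming Symbol is this module): is_assigned is the non-raising query, check_assigned the asserting variant.

(* Hypothetical sketch: assert that every node in a graph fragment has memory. *)
let check_all_assigned nodes = Array.iter Symbol.check_assigned nodes

(* Or, without raising: *)
let all_assigned nodes = Array.for_all Symbol.is_assigned nodes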

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO

Operator (owl-base.Owl_computation_cpu_init.Make.Graph.Optimiser.Operator)

Module Optimiser.Operator

Vectorised functions
val empty : int array -> Symbol.Shape.Type.arr

TODO

val zeros : int array -> Symbol.Shape.Type.arr

TODO

val ones : int array -> Symbol.Shape.Type.arr

TODO

val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val sequential : - ?a:Symbol.Shape.Type.elt -> - ?step:Symbol.Shape.Type.elt -> - int array -> - Symbol.Shape.Type.arr

TODO

val uniform : - ?a:Symbol.Shape.Type.elt -> - ?b:Symbol.Shape.Type.elt -> - int array -> - Symbol.Shape.Type.arr

TODO

val gaussian : - ?mu:Symbol.Shape.Type.elt -> - ?sigma:Symbol.Shape.Type.elt -> - int array -> - Symbol.Shape.Type.arr

TODO

val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val init_nd : - int array -> - (int array -> Symbol.Shape.Type.elt) -> - Symbol.Shape.Type.arr

TODO

val shape : Symbol.Shape.Type.arr -> int array

TODO

val numel : Symbol.Shape.Type.arr -> int

TODO

TODO

val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit

TODO

val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val set_slice : - int list list -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - unit

TODO

TODO

val set_fancy : - Owl_types.index list -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - unit

TODO

val copy_ : out:'a -> 'b -> 'c

TODO

val reset : Symbol.Shape.Type.arr -> unit

TODO

val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val pad : ?v:Symbol.Shape.Type.elt -> int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr

TODO

val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val concatenate : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

TODO

val split : ?axis:int -> 'a -> 'b -> 'c

TODO

val draw : ?axis:int -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr * 'a array

TODO

TODO

delay f x returns f x. It makes it possible to apply a function that is not tracked by the computation graph and to delay its evaluation. The output must have the same shape as the input.

delay_array out_shape f x works in the same way as delay but is applied to an array of ndarrays. The shape of the output must be given as an argument.
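A hedged sketch for delay_array, complementing the delay example given earlier; it assumes this module is in scope as Op and that the argument order is out_shape, then f, then the input array, as the description above indicates.

(* Hypothetical sketch: sum several ndarrays outside the graph in one step.
   Assumes [xs] contains at least one element. *)
let fused_sum out_shape (xs : Op.Symbol.Shape.Type.arr array) =
  Op.delay_array out_shape
    (fun raw ->
      Array.fold_left Op.Symbol.Shape.Type.Device.A.add
        raw.(0)
        (Array.sub raw 1 (Array.length raw - 1)))
    xs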

val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

lazy_print x prints the value of x when it is evaluated. It is implemented as an identity node. For information about the optional parameters, refer to the print function of the Ndarray module.

val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit

TODO

val min : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum_reduce : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

TODO

val elt_less_equal_scalar : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.elt -> - Symbol.Shape.Type.arr

TODO

val elt_greater_equal_scalar : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.elt -> - Symbol.Shape.Type.arr

TODO

val conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val upsampling2d_backward : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val row_num : Symbol.Shape.Type.arr -> int

TODO

val col_num : Symbol.Shape.Type.arr -> int

TODO

val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

TODO

val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val to_rows : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val to_cols : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr

TODO

val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr

TODO

val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array

TODO

Scalar functions
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/Optimiser/index.html b/owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/Optimiser/index.html deleted file mode 100644 index c36c1dc52..000000000 --- a/owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/Optimiser/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Optimiser (owl-base.Owl_computation_cpu_init.Make.Graph.Optimiser)

Module Graph.Optimiser

Core functions
val estimate_complexity : 'a Owl_graph.node array -> int * int

TODO

val optimise_nodes : Operator.Symbol.Shape.Type.attr Owl_graph.node array -> unit

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/index.html b/owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/index.html deleted file mode 100644 index 6a5f27f59..000000000 --- a/owl-base/Owl_computation_cpu_init/Make/argument-1-Graph/index.html +++ /dev/null @@ -1,33 +0,0 @@ - -Graph (owl-base.Owl_computation_cpu_init.Make.Graph)

Parameter Make.Graph

Type definition
type graph

TODO

Core functions
val shape_or_value : Optimiser.Operator.Symbol.Shape.Type.t -> string

TODO

val graph_to_dot : graph -> string

TODO

val graph_to_trace : graph -> string

TODO

val save_graph : 'a -> string -> unit

TODO

val load_graph : string -> 'a * 'b

TODO

val invalidate_rvs : graph -> unit

TODO

val is_iopair_safe : 'a Owl_graph.node -> 'a Owl_graph.node -> bool

TODO

val update_iopair : graph -> unit

TODO

val remove_unused_iopair : 'a Owl_graph.node array -> 'b array -> 'a Owl_graph.node array * 'b array

TODO

val optimise : graph -> unit

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_cpu_init/Make/index.html b/owl-base/Owl_computation_cpu_init/Make/index.html deleted file mode 100644 index 0e3504589..000000000 --- a/owl-base/Owl_computation_cpu_init/Make/index.html +++ /dev/null @@ -1,13 +0,0 @@ - -Make (owl-base.Owl_computation_cpu_init.Make)

Module Owl_computation_cpu_init.Make

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/.dummy b/owl-base/Owl_computation_engine/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Linalg/index.html b/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Linalg/index.html deleted file mode 100644 index 2e3b771e4..000000000 --- a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Linalg/index.html +++ /dev/null @@ -1,33 +0,0 @@ - -Linalg (owl-base.Owl_computation_engine.Flatten.Engine.Graph.Optimiser.Operator.Linalg)

Module Operator.Linalg

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Mat/index.html b/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Mat/index.html deleted file mode 100644 index 191ad9b3b..000000000 --- a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_engine.Flatten.Engine.Graph.Optimiser.Operator.Mat)

Module Operator.Mat

val eye : int -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Scalar/index.html b/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Scalar/index.html deleted file mode 100644 index 17fe86604..000000000 --- a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Scalar/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -Scalar (owl-base.Owl_computation_engine.Flatten.Engine.Graph.Optimiser.Operator.Scalar)

Module Operator.Scalar

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html deleted file mode 100644 index fd727b97a..000000000 --- a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_computation_engine.Flatten.Engine.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html deleted file mode 100644 index 1907052e2..000000000 --- a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_engine.Flatten.Engine.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html deleted file mode 100644 index 5c759e55d..000000000 --- a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_computation_engine.Flatten.Engine.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html b/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html deleted file mode 100644 index be772cdb4..000000000 --- a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_computation_engine.Flatten.Engine.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html b/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html deleted file mode 100644 index b41ed7516..000000000 --- a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Device (owl-base.Owl_computation_engine.Flatten.Engine.Graph.Optimiser.Operator.Symbol.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html b/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html deleted file mode 100644 index 40f37e209..000000000 --- a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Type (owl-base.Owl_computation_engine.Flatten.Engine.Graph.Optimiser.Operator.Symbol.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

The block type keeps a reference to a block of memory and to the nodes that share that block.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/index.html b/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/index.html deleted file mode 100644 index 45357094c..000000000 --- a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/Shape/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Shape (owl-base.Owl_computation_engine.Flatten.Engine.Graph.Optimiser.Operator.Symbol.Shape)

Module Symbol.Shape

Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/index.html b/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/index.html deleted file mode 100644 index d08aaaab4..000000000 --- a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/Symbol/index.html +++ /dev/null @@ -1,28 +0,0 @@ - -Symbol (owl-base.Owl_computation_engine.Flatten.Engine.Graph.Optimiser.Operator.Symbol)

Module Operator.Symbol

Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.

val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.

val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit

add_node_to_block node block links node to the reusable block and initialises the node's memory within the memory of that block.

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Return the node that is currently using the memory of the block.

val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit

Update the node that is currently using the block of memory.

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.
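For illustration, a minimal sketch of how these block-management functions fit together. It assumes the code lives inside (or opens) an instantiated Symbol module and that x is an existing Shape.Type.attr Owl_graph.node; the block size 1024 is arbitrary and the helper name is hypothetical.

(* Illustrative sketch only; not part of the generated API. *)
let attach_to_fresh_block x =
  (* allocate a new reusable block of 1024 elements with a fresh id *)
  let b = make_empty_block ~block_id:(new_block_id ()) 1024 in
  (* link [x] to the block; its memory is now taken from the block *)
  add_node_to_block x b;
  (* per the doc above, an unassigned node would report a block id of -1 *)
  assert (get_block_id x >= 0);
  (* return the node currently using the block's memory, if any *)
  get_active_node b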

val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/index.html b/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/index.html deleted file mode 100644 index 9cdb4a5fe..000000000 --- a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/Operator/index.html +++ /dev/null @@ -1,420 +0,0 @@ - -Operator (owl-base.Owl_computation_engine.Flatten.Engine.Graph.Optimiser.Operator)

Module Optimiser.Operator

Vectorised functions
val empty : int array -> Symbol.Shape.Type.arr

TODO

val zeros : int array -> Symbol.Shape.Type.arr

TODO

val ones : int array -> Symbol.Shape.Type.arr

TODO

val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val sequential : ?a:Symbol.Shape.Type.elt -> ?step:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val uniform : ?a:Symbol.Shape.Type.elt -> ?b:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val gaussian : ?mu:Symbol.Shape.Type.elt -> ?sigma:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val init_nd : int array -> (int array -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val shape : Symbol.Shape.Type.arr -> int array

TODO

val numel : Symbol.Shape.Type.arr -> int

TODO

TODO

val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit

TODO

val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val set_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

TODO

val set_fancy : Owl_types.index list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

val copy_ : out:'a -> 'b -> 'c

TODO

val reset : Symbol.Shape.Type.arr -> unit

TODO

val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val pad : ?v:Symbol.Shape.Type.elt -> int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr

TODO

val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val concatenate : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

TODO

val split : ?axis:int -> 'a -> 'b -> 'c

TODO

val draw : ?axis:int -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr * 'a array

TODO

TODO

delay f x returns f x. It makes it possible to apply a function that is not tracked by the computation graph and to delay its evaluation. The output must have the same shape as the input.

delay_array out_shape f x works in the same way as delay but applies to an array of ndarrays. The shape of the output must be given as an argument.
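A hedged sketch of delay, assuming an instantiated graph module is in scope (so that Symbol.var_arr, delay and the underlying ndarray module Symbol.Shape.Type.Device.A are all visible); the exact val declaration of delay is not reproduced on this page, so the call below follows the description above and is illustrative only.

(* Illustrative sketch only; not part of the generated API. *)
let _demo_delay () =
  (* create a symbolic 28 x 28 placeholder *)
  let x = Symbol.var_arr ~shape:[| 28; 28 |] "x" in
  (* square every element with an untracked ndarray function; evaluation is
     deferred and the result keeps the shape of [x] *)
  delay (fun a -> Symbol.Shape.Type.Device.A.sqr a) x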

val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

lazy_print x prints the output of x when it is evaluated. It is implemented as an identity node. For information about the optional parameters, refer to the print function of the Ndarray module.
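A hedged usage sketch (same scope assumptions as above): because lazy_print is an identity node, it can be spliced into a pipeline and the printing only happens when the graph is evaluated.

(* Illustrative sketch only; not part of the generated API. *)
let _demo_lazy_print x =
  (* print up to 10 rows with a header at evaluation time, then pass [x] on *)
  sum ~axis:0 (lazy_print ~max_row:10 ~header:true x)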

val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit

TODO

val min : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum_reduce : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

TODO

val elt_less_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val elt_greater_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max_pool1d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max_pool2d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max_pool3d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val avg_pool1d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val avg_pool2d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val avg_pool3d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val upsampling2d_backward : - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val row_num : Symbol.Shape.Type.arr -> int

TODO

val col_num : Symbol.Shape.Type.arr -> int

TODO

val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

TODO

val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val to_rows : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val to_cols : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr

TODO

val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr

TODO

val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array

TODO

Scalar functions
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/index.html b/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/index.html deleted file mode 100644 index 5fa13ff88..000000000 --- a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/Optimiser/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Optimiser (owl-base.Owl_computation_engine.Flatten.Engine.Graph.Optimiser)

Module Graph.Optimiser

Core functions
val estimate_complexity : 'a Owl_graph.node array -> int * int

TODO

val optimise_nodes : Operator.Symbol.Shape.Type.attr Owl_graph.node array -> unit

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/index.html b/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/index.html deleted file mode 100644 index e0c902731..000000000 --- a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/Graph/index.html +++ /dev/null @@ -1,33 +0,0 @@ - -Graph (owl-base.Owl_computation_engine.Flatten.Engine.Graph)

Module Engine.Graph

Type definition
type graph

TODO

Core functions
val shape_or_value : Optimiser.Operator.Symbol.Shape.Type.t -> string

TODO

val graph_to_dot : graph -> string

TODO

val graph_to_trace : graph -> string

TODO

val save_graph : 'a -> string -> unit

TODO

val load_graph : string -> 'a * 'b

TODO

val invalidate_rvs : graph -> unit

TODO

val is_iopair_safe : 'a Owl_graph.node -> 'a Owl_graph.node -> bool

TODO

val update_iopair : graph -> unit

TODO

val remove_unused_iopair : 'a Owl_graph.node array -> 'b array -> 'a Owl_graph.node array * 'b array

TODO

val optimise : graph -> unit

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/index.html b/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/index.html deleted file mode 100644 index 6f7a3e451..000000000 --- a/owl-base/Owl_computation_engine/Flatten/argument-1-Engine/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Engine (owl-base.Owl_computation_engine.Flatten.Engine)

Parameter Flatten.Engine

Core evaluation functions of the engine

TODO

TODO

val eval_graph : Graph.graph -> unit

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Flatten/index.html b/owl-base/Owl_computation_engine/Flatten/index.html deleted file mode 100644 index 7e7efa982..000000000 --- a/owl-base/Owl_computation_engine/Flatten/index.html +++ /dev/null @@ -1,485 +0,0 @@ - -Flatten (owl-base.Owl_computation_engine.Flatten)

Module Owl_computation_engine.Flatten

Parameters

Signature

include module type of struct include Engine end
module Graph = Engine.Graph
Core evaluation functions of the engine

TODO

TODO

val eval_graph : Graph.graph -> unit

TODO

include module type of struct include Graph end
module Optimiser = Graph.Optimiser
type graph = Engine.Graph.graph
val shape_or_value : Optimiser.Operator.Symbol.Shape.Type.t -> string
val graph_to_dot : graph -> string
val graph_to_trace : graph -> string
val save_graph : 'a -> string -> unit
val load_graph : string -> 'a * 'b
val invalidate_rvs : graph -> unit
val is_iopair_safe : 'a Owl_graph.node -> 'a Owl_graph.node -> bool
val update_iopair : graph -> unit
val remove_unused_iopair : 'a Owl_graph.node array -> 'b array -> 'a Owl_graph.node array * 'b array
val optimise : graph -> unit
include module type of struct include Optimiser end
module Operator = Optimiser.Operator
val estimate_complexity : 'a Owl_graph.node array -> int * int
val optimise_nodes : Operator.Symbol.Shape.Type.attr Owl_graph.node array -> unit
include module type of struct include Operator end
module Symbol = Operator.Symbol
val empty : int array -> Symbol.Shape.Type.arr
val zeros : int array -> Symbol.Shape.Type.arr
val ones : int array -> Symbol.Shape.Type.arr
val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val sequential : ?a:Symbol.Shape.Type.elt -> ?step:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val uniform : ?a:Symbol.Shape.Type.elt -> ?b:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val gaussian : ?mu:Symbol.Shape.Type.elt -> ?sigma:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr
val init_nd : int array -> (int array -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr
val shape : Symbol.Shape.Type.arr -> int array
val numel : Symbol.Shape.Type.arr -> int
val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit
val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val set_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit
val set_fancy : Owl_types.index list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit
val copy_ : out:'a -> 'b -> 'c
val reset : Symbol.Shape.Type.arr -> unit
val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val pad : ?v:Symbol.Shape.Type.elt -> int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr
val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val concatenate : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr
val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr
val split : ?axis:int -> 'a -> 'b -> 'c
val draw : ?axis:int -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr * 'a array
val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit
val min : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val sum : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val sum_reduce : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val elt_less_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val elt_greater_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val dilated_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val dilated_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val dilated_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val avg_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val avg_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val avg_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val upsampling2d_backward : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val row_num : Symbol.Shape.Type.arr -> int
val col_num : Symbol.Shape.Type.arr -> int
val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit
val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit
val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val to_rows : Symbol.Shape.Type.arr -> 'a array
val to_cols : Symbol.Shape.Type.arr -> 'a array
val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr
val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr
val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array
module Scalar = Operator.Scalar
module Mat = Operator.Mat
module Linalg = Operator.Linalg
include module type of struct include Symbol end
module Shape = Symbol.Shape
val op_to_str : Shape.Type.op -> string
val is_random_variable : Shape.Type.op -> bool
val refnum : 'a Owl_graph.node -> int
val node_shape : Shape.Type.attr Owl_graph.node -> int array
val node_numel : Shape.Type.attr Owl_graph.node -> int
val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool
val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit
val shape_to_str : int array option array -> string
val node_to_str : Shape.Type.attr Owl_graph.node -> string
val node_to_arr : Shape.Type.t -> Shape.Type.arr
val arr_to_node : Shape.Type.arr -> Shape.Type.t
val node_to_elt : Shape.Type.t -> Shape.Type.elt
val elt_to_node : Shape.Type.elt -> Shape.Type.t
val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node
val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node
val var_arr : ?shape:int array -> string -> Shape.Type.arr
val var_elt : string -> Shape.Type.elt
val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr
val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt
val new_block_id : unit -> int
val make_empty_block : ?block_id:int -> int -> Shape.Type.block
val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit
val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit
val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option
val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit
val get_block_id : Shape.Type.attr Owl_graph.node -> int
val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit
val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit
val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit
val get_reuse : Shape.Type.attr Owl_graph.node -> bool
val is_shared : Shape.Type.attr Owl_graph.node -> bool
val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array
val is_var : Shape.Type.attr Owl_graph.node -> bool
val is_const : Shape.Type.attr Owl_graph.node -> bool
val is_node_arr : Shape.Type.attr Owl_graph.node -> bool
val is_node_elt : Shape.Type.attr Owl_graph.node -> bool
val is_assigned : Shape.Type.attr Owl_graph.node -> bool
val check_assigned : Shape.Type.attr Owl_graph.node -> unit
val is_valid : Shape.Type.attr Owl_graph.node -> bool
val validate : Shape.Type.attr Owl_graph.node -> unit
val invalidate : Shape.Type.attr Owl_graph.node -> unit
val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit
val is_freeze : Shape.Type.attr Owl_graph.node -> bool
val freeze : Shape.Type.attr Owl_graph.node -> unit
val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit
val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit
val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit
val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit
val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit
val float_to_elt : float -> Shape.Type.elt
val elt_to_float : Shape.Type.elt -> float
include module type of struct include Shape end
module Type = Shape.Type
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array
include module type of struct include Type end
module Device = Type.Device
and block = Engine.Graph.Optimiser.Operator.Symbol.Shape.Type.block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}
and attr = Engine.Graph.Optimiser.Operator.Symbol.Shape.Type.attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}
and op = Engine.Graph.Optimiser.Operator.Symbol.Shape.Type.op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * int -> elt
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of elt -> elt
  31. | Fold of int * elt -> elt -> elt
  32. | Scan of int * elt -> elt -> elt
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of Device.A.arr -> Device.A.arr
  36. | DelayArray of int array * Device.A.arr array -> Device.A.arr
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
include module type of struct include Device end
module A = Device.A
val make_device : unit -> device
val arr_to_value : A.arr -> value
val value_to_arr : value -> A.arr
val elt_to_value : A.elt -> value
val value_to_elt : value -> A.elt
val value_to_float : value -> float
val is_arr : value -> bool
val is_elt : value -> bool
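The value-boxing functions at the end of this signature convert between raw device ndarrays and the node-level value type. The sketch below is illustrative only and assumes a module D exposing just that part of the signature.

module Value_roundtrip (D : sig
  module A : sig
    type arr
  end
  type value
  val arr_to_value : A.arr -> value
  val value_to_arr : value -> A.arr
  val is_arr : value -> bool
end) = struct
  (* Box a raw device ndarray into the node-level value type and unbox it
     again; is_arr distinguishes ndarray values from scalar values. *)
  let roundtrip (x : D.A.arr) : D.A.arr =
    let v = D.arr_to_value x in
    assert (D.is_arr v);
    D.value_to_arr v
end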
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Linalg/index.html b/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Linalg/index.html deleted file mode 100644 index e099d4d81..000000000 --- a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Linalg/index.html +++ /dev/null @@ -1,33 +0,0 @@ - -Linalg (owl-base.Owl_computation_engine.Make_Graph.Optimiser.Operator.Linalg)

Module Operator.Linalg

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Mat/index.html b/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Mat/index.html deleted file mode 100644 index 04ad8c4f4..000000000 --- a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_engine.Make_Graph.Optimiser.Operator.Mat)

Module Operator.Mat

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Scalar/index.html b/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Scalar/index.html deleted file mode 100644 index 6f9f901cd..000000000 --- a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Scalar/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -Scalar (owl-base.Owl_computation_engine.Make_Graph.Optimiser.Operator.Scalar)

Module Operator.Scalar

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html deleted file mode 100644 index fdcb03ef3..000000000 --- a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_computation_engine.Make_Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
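A small usage sketch for the solver entries above, not part of the generated page. It assumes a module L with the chol and linsolve signatures listed here, and it assumes that ~trans:true makes linsolve solve against the transposed coefficient matrix.

module Chol_solve (L : sig
  type arr
  val chol : ?upper:bool -> arr -> arr
  val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> arr -> arr -> arr
end) = struct
  (* Solve a x = b for symmetric positive-definite a via a = R^T R with R
     upper triangular: first solve R^T y = b, then R x = y.  ~typ:`u marks
     the coefficient matrix as upper triangular. *)
  let solve a b =
    let r = L.chol ~upper:true a in
    let y = L.linsolve ~trans:true ~typ:`u r b in
    L.linsolve ~typ:`u r y
end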
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html deleted file mode 100644 index e74963864..000000000 --- a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_engine.Make_Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
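An illustrative sketch (not part of the generated page) of the matrix helpers above, assuming a module M with this Mat signature; k shifts the diagonal from which the triangle is taken.

module Triangles (M : sig
  type arr
  val eye : int -> arr
  val triu : ?k:int -> arr -> arr
  val tril : ?k:int -> arr -> arr
end) = struct
  (* k = 0 keeps the main diagonal; k = -1 drops it from the lower triangle. *)
  let upper_with_diag a = M.triu ~k:0 a
  let strict_lower a = M.tril ~k:(-1) a
  let identity n = M.eye n
end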
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html deleted file mode 100644 index 42fe06c57..000000000 --- a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_computation_engine.Make_Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
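The scalar primitives above are enough to spell out composite functions by hand. The sketch below is illustrative only; it assumes a module S with this Scalar signature, and `one` is passed explicitly because elt is abstract here.

module Scalar_sigmoid (S : sig
  type elt
  val add : elt -> elt -> elt
  val div : elt -> elt -> elt
  val neg : elt -> elt
  val exp : elt -> elt
  val sigmoid : elt -> elt
end) = struct
  (* sigmoid x = 1 / (1 + exp (-x)); the result should agree with S.sigmoid x. *)
  let sigmoid_manual ~one x = S.div one (S.add one (S.exp (S.neg x)))
end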
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html b/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html deleted file mode 100644 index c3caf57cd..000000000 --- a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html +++ /dev/null @@ -1,381 +0,0 @@ - -A (owl-base.Owl_computation_engine.Make_Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A)

Module Device.A

val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
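The underscore-suffixed functions above write their result into an existing buffer instead of allocating a fresh one. The sketch below is illustrative only and assumes a module A exposing the small subset of this signature that it uses.

module Fused_step (A : sig
  type arr
  val copy : arr -> arr
  val add_ : ?out:arr -> arr -> arr -> unit
  val sigmoid_ : ?out:arr -> arr -> unit
end) = struct
  (* Compute sigmoid (x + b) while reusing one output buffer, which is what
     the in-place variants are for. *)
  let sigmoid_add x b =
    let y = A.copy x in
    A.add_ ~out:y y b;     (* y <- y + b *)
    A.sigmoid_ ~out:y y;   (* y <- sigmoid y *)
    y
end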
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html b/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html deleted file mode 100644 index bc7b01ad9..000000000 --- a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Device (owl-base.Owl_computation_engine.Make_Graph.Optimiser.Operator.Symbol.Shape.Type.Device)

Module Type.Device

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/index.html b/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/index.html deleted file mode 100644 index 832db66d2..000000000 --- a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/Type/index.html +++ /dev/null @@ -1,17 +0,0 @@ - -Type (owl-base.Owl_computation_engine.Make_Graph.Optimiser.Operator.Symbol.Shape.Type)

Module Shape.Type

module Device : sig ... end
and attr = Owl_computation_optimiser.Make(Owl_computation_operator.Make(Owl_computation_symbol.Make(Owl_computation_shape.Make(Owl_computation_type.Make(Device))))).Operator.Symbol.Shape.Type.attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}
and op = Owl_computation_optimiser.Make(Owl_computation_operator.Make(Owl_computation_symbol.Make(Owl_computation_shape.Make(Owl_computation_type.Make(Device))))).Operator.Symbol.Shape.Type.op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * int -> elt
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of elt -> elt
  31. | Fold of int * elt -> elt -> elt
  32. | Scan of int * elt -> elt -> elt
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of Device.A.arr -> Device.A.arr
  36. | DelayArray of int array * Device.A.arr array -> Device.A.arr
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/index.html b/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/index.html deleted file mode 100644 index d8d4cefab..000000000 --- a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/Shape/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Shape (owl-base.Owl_computation_engine.Make_Graph.Optimiser.Operator.Symbol.Shape)

Module Symbol.Shape

module Type : sig ... end
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/index.html b/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/index.html deleted file mode 100644 index afca835a0..000000000 --- a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/Symbol/index.html +++ /dev/null @@ -1,28 +0,0 @@ - -Symbol (owl-base.Owl_computation_engine.Make_Graph.Optimiser.Operator.Symbol)

Module Operator.Symbol

module Shape : sig ... end
val op_to_str : Shape.Type.op -> string
val is_random_variable : Shape.Type.op -> bool
val refnum : 'a Owl_graph.node -> int
val node_shape : Shape.Type.attr Owl_graph.node -> int array
val node_numel : Shape.Type.attr Owl_graph.node -> int
val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool
val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit
val shape_to_str : int array option array -> string
val node_to_str : Shape.Type.attr Owl_graph.node -> string
val node_to_arr : Shape.Type.t -> Shape.Type.arr
val arr_to_node : Shape.Type.arr -> Shape.Type.t
val node_to_elt : Shape.Type.t -> Shape.Type.elt
val elt_to_node : Shape.Type.elt -> Shape.Type.t
val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node
val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node
val var_arr : ?shape:int array -> string -> Shape.Type.arr
val var_elt : string -> Shape.Type.elt
val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr
val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt
val new_block_id : unit -> int
val make_empty_block : ?block_id:int -> int -> Shape.Type.block
val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit
val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit
val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option
val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit
val get_block_id : Shape.Type.attr Owl_graph.node -> int
val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit
val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit
val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit
val get_reuse : Shape.Type.attr Owl_graph.node -> bool
val is_shared : Shape.Type.attr Owl_graph.node -> bool
val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array
val is_var : Shape.Type.attr Owl_graph.node -> bool
val is_const : Shape.Type.attr Owl_graph.node -> bool
val is_node_arr : Shape.Type.attr Owl_graph.node -> bool
val is_node_elt : Shape.Type.attr Owl_graph.node -> bool
val is_assigned : Shape.Type.attr Owl_graph.node -> bool
val check_assigned : Shape.Type.attr Owl_graph.node -> unit
val is_valid : Shape.Type.attr Owl_graph.node -> bool
val validate : Shape.Type.attr Owl_graph.node -> unit
val invalidate : Shape.Type.attr Owl_graph.node -> unit
val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit
val is_freeze : Shape.Type.attr Owl_graph.node -> bool
val freeze : Shape.Type.attr Owl_graph.node -> unit
val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit
val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit
val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit
val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit
val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit
val float_to_elt : float -> Shape.Type.elt
val elt_to_float : Shape.Type.elt -> float
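var_arr and assign_arr above are the usual way to feed external data into a symbolic graph: declare a named variable, then bind a concrete device array to it. The sketch below is illustrative only and assumes a module S with the relevant slice of this Symbol signature.

module Feed_input (S : sig
  module Shape : sig
    module Type : sig
      type arr
      module Device : sig
        module A : sig
          type arr
        end
      end
    end
  end
  val var_arr : ?shape:int array -> string -> Shape.Type.arr
  val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit
end) = struct
  (* Declare a symbolic ndarray variable and bind a concrete device array to
     it; any graph built from the variable sees this value when evaluated. *)
  let make_input name shape value =
    let x = S.var_arr ~shape name in
    S.assign_arr x value;
    x
end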
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/index.html b/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/index.html deleted file mode 100644 index da568755a..000000000 --- a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/Operator/index.html +++ /dev/null @@ -1,420 +0,0 @@ - -Operator (owl-base.Owl_computation_engine.Make_Graph.Optimiser.Operator)

Module Optimiser.Operator

module Symbol : sig ... end
val empty : int array -> Symbol.Shape.Type.arr
val zeros : int array -> Symbol.Shape.Type.arr
val ones : int array -> Symbol.Shape.Type.arr
val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val sequential : ?a:Symbol.Shape.Type.elt -> ?step:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val uniform : ?a:Symbol.Shape.Type.elt -> ?b:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val gaussian : ?mu:Symbol.Shape.Type.elt -> ?sigma:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr
val init_nd : int array -> (int array -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr
val shape : Symbol.Shape.Type.arr -> int array
val numel : Symbol.Shape.Type.arr -> int
val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit
val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val set_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit
val set_fancy : Owl_types.index list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit
val copy_ : out:'a -> 'b -> 'c
val reset : Symbol.Shape.Type.arr -> unit
val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val pad : ?v:Symbol.Shape.Type.elt -> int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr
val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val concatenate : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr
val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr
val split : ?axis:int -> 'a -> 'b -> 'c
val draw : ?axis:int -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr * 'a array
val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit
val min : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val sum : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val sum_reduce : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val elt_less_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val elt_greater_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val dilated_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val dilated_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val dilated_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val avg_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val avg_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val avg_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val upsampling2d_backward : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val row_num : Symbol.Shape.Type.arr -> int
val col_num : Symbol.Shape.Type.arr -> int
val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit
val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit
val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val to_rows : Symbol.Shape.Type.arr -> 'a array
val to_cols : Symbol.Shape.Type.arr -> 'a array
val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr
val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr
val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
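Every function in this Operator module returns a symbolic Symbol.Shape.Type.arr rather than a concrete ndarray, so chaining calls only grows the computation graph. The sketch below is illustrative only: Op is an assumed binding for an instantiation of this module, and the stride arrays simply follow whatever convention the chosen backend uses for conv2d and max_pool2d.

(* Hypothetical sketch: Op stands for an instance of this Operator module.
   Nothing is evaluated here; the calls only build graph nodes. *)
let conv_block x w =
  let y = Op.conv2d ~padding:Owl_types.SAME x w [| 1; 1 |] in     (* strides *)
  Op.max_pool2d ~padding:Owl_types.VALID y [| 2; 2 |] [| 2; 2 |]  (* window, strides *)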
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/index.html b/owl-base/Owl_computation_engine/Make_Graph/Optimiser/index.html
deleted file mode 100644
index 56badd689..000000000
--- a/owl-base/Owl_computation_engine/Make_Graph/Optimiser/index.html
+++ /dev/null
@@ -1,4 +0,0 @@

Optimiser (owl-base.Owl_computation_engine.Make_Graph.Optimiser)

Module Make_Graph.Optimiser

module Operator : sig ... end
val estimate_complexity : 'a Owl_graph.node array -> int * int
val optimise_nodes : Operator.Symbol.Shape.Type.attr Owl_graph.node array -> unit
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/A/Linalg/index.html b/owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/A/Linalg/index.html
deleted file mode 100644
index 67097ebde..000000000
--- a/owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/A/Linalg/index.html
+++ /dev/null
@@ -1,6 +0,0 @@

Linalg (owl-base.Owl_computation_engine.Make_Graph.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
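A short sketch of how these linear-algebra functions combine with the basic ndarray operations documented further down; it is not part of the API, and it assumes a module A satisfying Owl_types_ndarray_algodiff.Sig (the signature that Device.A includes).

(* Hypothetical sketch over an assumed module A; uses only functions that
   appear in the signatures on these pages. *)
module Linalg_demo (A : Owl_types_ndarray_algodiff.Sig) = struct
  (* Solve a x = b through the QR factorisation: x = inv(R) (Q^T b). *)
  let solve_via_qr a b =
    let q, r = A.Linalg.qr a in
    A.dot (A.Linalg.inv r) (A.dot (A.transpose q) b)

  (* Distance to the direct solver, as a numerical sanity check. *)
  let residual a b = A.l2norm' (A.sub (solve_via_qr a b) (A.Linalg.linsolve a b))
end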
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/A/Mat/index.html b/owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/A/Mat/index.html
deleted file mode 100644
index 99df3f15b..000000000
--- a/owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/A/Mat/index.html
+++ /dev/null
@@ -1,2 +0,0 @@

Mat (owl-base.Owl_computation_engine.Make_Graph.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/A/Scalar/index.html b/owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/A/Scalar/index.html
deleted file mode 100644
index f8d9be63e..000000000
--- a/owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/A/Scalar/index.html
+++ /dev/null
@@ -1,2 +0,0 @@

Scalar (owl-base.Owl_computation_engine.Make_Graph.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
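The Scalar submodule mirrors the element-wise functions on the abstract elt type, which is useful when a formula has to be written once for every backend. A small sketch, illustrative only, assuming a module A satisfying Owl_types_ndarray_algodiff.Sig whose Scalar submodule and float_to_elt / elt_to_float conversions are the ones listed on these pages:

(* Hypothetical sketch over an assumed module A. *)
module Scalar_demo (A : Owl_types_ndarray_algodiff.Sig) = struct
  let one = A.float_to_elt 1.0

  (* softplus x = log (1 + exp x), written purely with Scalar operations *)
  let softplus x = A.Scalar.(log (add one (exp x)))

  (* round-trip through float, e.g. for printing *)
  let softplus_float x = A.elt_to_float (softplus (A.float_to_elt x))
end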
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/A/index.html b/owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/A/index.html
deleted file mode 100644
index d54c3413a..000000000
--- a/owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/A/index.html
+++ /dev/null
@@ -1,379 +0,0 @@

A (owl-base.Owl_computation_engine.Make_Graph.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
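A small sketch of the element-wise comparison operators listed above. It is written against a hand-copied excerpt of the signature rather than a named module type, and the 0/1-mask behaviour of the elt_* comparisons is an assumption borrowed from Owl's dense backends, not something stated on this page.

(* Hypothetical sketch; an actual instantiation would pass the full A module. *)
module Mask_demo (A : sig
  type arr
  type elt
  val float_to_elt : float -> elt
  val elt_greater_scalar : arr -> elt -> arr
  val mul : arr -> arr -> arr
end) = struct
  (* keep entries strictly above t, zero the rest (assuming 0/1 masks) *)
  let threshold t x = A.mul x (A.elt_greater_scalar x (A.float_to_elt t))
end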
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/index.html b/owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/index.html
deleted file mode 100644
index 79c45c852..000000000
--- a/owl-base/Owl_computation_engine/Make_Graph/argument-1-Device/index.html
+++ /dev/null
@@ -1,2 +0,0 @@

Device (owl-base.Owl_computation_engine.Make_Graph.Device)

Parameter Make_Graph.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO
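Taken together, these conversions are what an engine uses to move data between the host ndarray module A and the device's value representation. The functor below is a usage sketch only; its inline parameter signature is a hand-written excerpt of this page, and a real engine would receive the complete Device module.

(* Hypothetical sketch: D is an assumed Device-like argument. *)
module Use_device (D : sig
  type device
  type value
  module A : Owl_types_ndarray_algodiff.Sig
  val make_device : unit -> device
  val arr_to_value : A.arr -> value
  val value_to_arr : value -> A.arr
  val is_arr : value -> bool
end) = struct
  let _dev = D.make_device ()

  (* move a host ndarray into the device value representation and back *)
  let roundtrip (x : D.A.arr) : D.A.arr =
    let v = D.arr_to_value x in
    assert (D.is_arr v);
    D.value_to_arr v
end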

\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine/Make_Graph/index.html b/owl-base/Owl_computation_engine/Make_Graph/index.html
deleted file mode 100644
index 59d07edc4..000000000
--- a/owl-base/Owl_computation_engine/Make_Graph/index.html
+++ /dev/null
@@ -1,39 +0,0 @@

Make_Graph (owl-base.Owl_computation_engine.Make_Graph)

Module Owl_computation_engine.Make_Graph

Parameters

Signature

include sig ... end
module Optimiser : sig ... end
val shape_or_value : Optimiser.Operator.Symbol.Shape.Type.t -> string
val _block_colour : int -> string
val graph_to_dot : graph -> string
val graph_to_trace : graph -> string
val save_graph : 'a -> string -> unit
val load_graph : string -> 'a * 'b
val invalidate_rvs : graph -> unit
val is_iopair_safe : 'a Owl_graph.node -> 'a Owl_graph.node -> bool
val update_iopair : graph -> unit
val remove_unused_iopair : 'a Owl_graph.node array -> 'b array -> 'a Owl_graph.node array * 'b array
val optimise : graph -> unit
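A usage sketch for the graph-level entry points above; G is an assumed binding for an instantiation of Make_Graph (for example the graph module of one of Owl's engines), and g is a previously constructed G.graph.

(* Hypothetical sketch: optimise the graph in place, then dump it for Graphviz. *)
let dump_optimised (g : G.graph) =
  G.optimise g;
  let oc = open_out "cgraph.dot" in   (* output file name is arbitrary *)
  output_string oc (G.graph_to_dot g);
  close_out oc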
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/.dummy b/owl-base/Owl_computation_engine_sig/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/A/Linalg/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/A/Linalg/index.html
deleted file mode 100644
index 586c9f978..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/A/Linalg/index.html
+++ /dev/null
@@ -1,6 +0,0 @@

Linalg (owl-base.Owl_computation_engine_sig.Flatten_Sig.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/A/Mat/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/A/Mat/index.html
deleted file mode 100644
index 4ead59605..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/A/Mat/index.html
+++ /dev/null
@@ -1,2 +0,0 @@

Mat (owl-base.Owl_computation_engine_sig.Flatten_Sig.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/A/Scalar/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/A/Scalar/index.html
deleted file mode 100644
index 24d9f2ddf..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/A/Scalar/index.html
+++ /dev/null
@@ -1,2 +0,0 @@

Scalar (owl-base.Owl_computation_engine_sig.Flatten_Sig.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/A/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/A/index.html
deleted file mode 100644
index 730ceab60..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/A/index.html
+++ /dev/null
@@ -1,379 +0,0 @@

A (owl-base.Owl_computation_engine_sig.Flatten_Sig.A)

Module Flatten_Sig.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/A/Linalg/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/A/Linalg/index.html
deleted file mode 100644
index 4e6664ac4..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/A/Linalg/index.html
+++ /dev/null
@@ -1,6 +0,0 @@

Linalg (owl-base.Owl_computation_engine_sig.Flatten_Sig.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/A/Mat/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/A/Mat/index.html
deleted file mode 100644
index cb808939b..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/A/Mat/index.html
+++ /dev/null
@@ -1,2 +0,0 @@

Mat (owl-base.Owl_computation_engine_sig.Flatten_Sig.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/A/Scalar/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/A/Scalar/index.html
deleted file mode 100644
index fa181f911..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/A/Scalar/index.html
+++ /dev/null
@@ -1,2 +0,0 @@

Scalar (owl-base.Owl_computation_engine_sig.Flatten_Sig.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/A/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/A/index.html deleted file mode 100644 index f02354a8d..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_computation_engine_sig.Flatten_Sig.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/index.html deleted file mode 100644 index be8188a8c..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Device/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Device (owl-base.Owl_computation_engine_sig.Flatten_Sig.Device)

Module Flatten_Sig.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO
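The core functions above convert between ndarrays/scalars and the opaque device value type. The sketch below is illustrative only and not part of the original documentation; DEVICE_LIKE and Roundtrip are hypothetical names mirroring the listed functions, while the real Device module is produced by Owl's computation-graph functors.

module type DEVICE_LIKE = sig
  type arr
  type value
  val arr_to_value : arr -> value
  val value_to_arr : value -> arr
  val is_arr : value -> bool
  val is_elt : value -> bool
end

module Roundtrip (D : DEVICE_LIKE) = struct
  (* Wrap an ndarray into a device value and recover it, checking the tag. *)
  let through_value (x : D.arr) : D.arr =
    let v = D.arr_to_value x in
    assert (D.is_arr v && not (D.is_elt v));
    D.value_to_arr v
end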

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Linalg/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Linalg/index.html deleted file mode 100644 index 2a6838e60..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Linalg/index.html +++ /dev/null @@ -1,33 +0,0 @@ - -Linalg (owl-base.Owl_computation_engine_sig.Flatten_Sig.Graph.Optimiser.Operator.Linalg)

Module Operator.Linalg

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Mat/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Mat/index.html deleted file mode 100644 index 863071621..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_engine_sig.Flatten_Sig.Graph.Optimiser.Operator.Mat)

Module Operator.Mat

val eye : int -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Scalar/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Scalar/index.html deleted file mode 100644 index c03d49685..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Scalar/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -Scalar (owl-base.Owl_computation_engine_sig.Flatten_Sig.Graph.Optimiser.Operator.Scalar)

Module Operator.Scalar

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html deleted file mode 100644 index 003a2a9d8..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_computation_engine_sig.Flatten_Sig.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html deleted file mode 100644 index 532a58b42..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_engine_sig.Flatten_Sig.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html deleted file mode 100644 index 9b4b669db..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_computation_engine_sig.Flatten_Sig.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html deleted file mode 100644 index a8e0dbf2f..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_computation_engine_sig.Flatten_Sig.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html deleted file mode 100644 index f2eb5488a..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Device (owl-base.Owl_computation_engine_sig.Flatten_Sig.Graph.Optimiser.Operator.Symbol.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html deleted file mode 100644 index 9fb38a67f..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Type (owl-base.Owl_computation_engine_sig.Flatten_Sig.Graph.Optimiser.Operator.Symbol.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

block type keeps a reference to a block of memory and to the nodes sharing that block.
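As a rough illustration of that sharing relation, the toy model below (not the real Shape.Type definitions; field and type names are simplified stand-ins) shows how a block can be queried for sharing:

(* Toy analogue of the documented block record. *)
type 'node toy_block = {
  size : int;                    (* number of elements the block can hold *)
  block_id : int;
  mutable active : 'node option; (* node currently owning the memory *)
  mutable nodes : 'node list;    (* all nodes sharing this block *)
}

(* A block is shared when more than one node references it. *)
let is_shared_block b = List.length b.nodes > 1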

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/index.html deleted file mode 100644 index 194098722..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/Shape/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Shape (owl-base.Owl_computation_engine_sig.Flatten_Sig.Graph.Optimiser.Operator.Symbol.Shape)

Module Symbol.Shape

Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/index.html deleted file mode 100644 index 14ae67eec..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/Symbol/index.html +++ /dev/null @@ -1,28 +0,0 @@ - -Symbol (owl-base.Owl_computation_engine_sig.Flatten_Sig.Graph.Optimiser.Operator.Symbol)

Module Operator.Symbol

Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.

val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.

val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit

Link a node to a reusable block and initialise its memory within the block's memory.

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Return the node that is currently using the memory of the block.

val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit

Update the node that is currently using the block of memory.

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.
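
Taken together, these calls describe the block-allocation bookkeeping. A minimal, self-contained sketch (not the real implementation) over a hypothetical ALLOC_LIKE signature exposing only the calls used; the -1 sentinel follows the get_block_id documentation above:

module type ALLOC_LIKE = sig
  type node
  type block
  val make_empty_block : ?block_id:int -> int -> block
  val add_node_to_block : node -> block -> unit
  val get_block_id : node -> int
end

module Ensure_assigned (M : ALLOC_LIKE) = struct
  (* Give [n] a fresh block of [size] elements unless it already has one. *)
  let ensure n ~size =
    if M.get_block_id n = -1 then M.add_node_to_block n (M.make_empty_block size)
end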

val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO

\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/index.html
deleted file mode 100644
index 511beb1a3..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/Operator/index.html
+++ /dev/null
@@ -1,420 +0,0 @@

Operator (owl-base.Owl_computation_engine_sig.Flatten_Sig.Graph.Optimiser.Operator)

Module Optimiser.Operator

Vectorised functions
val empty : int array -> Symbol.Shape.Type.arr

TODO

val zeros : int array -> Symbol.Shape.Type.arr

TODO

val ones : int array -> Symbol.Shape.Type.arr

TODO

val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val sequential : - ?a:Symbol.Shape.Type.elt -> - ?step:Symbol.Shape.Type.elt -> - int array -> - Symbol.Shape.Type.arr

TODO

val uniform : - ?a:Symbol.Shape.Type.elt -> - ?b:Symbol.Shape.Type.elt -> - int array -> - Symbol.Shape.Type.arr

TODO

val gaussian : - ?mu:Symbol.Shape.Type.elt -> - ?sigma:Symbol.Shape.Type.elt -> - int array -> - Symbol.Shape.Type.arr

TODO

val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val init_nd : - int array -> - (int array -> Symbol.Shape.Type.elt) -> - Symbol.Shape.Type.arr

TODO

val shape : Symbol.Shape.Type.arr -> int array

TODO

val numel : Symbol.Shape.Type.arr -> int

TODO

TODO

val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit

TODO

val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val set_slice : - int list list -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - unit

TODO

TODO

val set_fancy : - Owl_types.index list -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - unit

TODO

val copy_ : out:'a -> 'b -> 'c

TODO

val reset : Symbol.Shape.Type.arr -> unit

TODO

val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val pad : - ?v:Symbol.Shape.Type.elt -> - int list list -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr

TODO

val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val concatenate : - ?axis:int -> - Symbol.Shape.Type.arr array -> - Symbol.Shape.Type.arr

TODO

val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

TODO

val split : ?axis:int -> 'a -> 'b -> 'c

TODO

val draw : - ?axis:int -> - Symbol.Shape.Type.arr -> - int -> - Symbol.Shape.Type.arr * 'a array

TODO

TODO

delay f x returns f x. It makes it possible to use a function that is not tracked by the computation graph and to delay its evaluation. The output must have the same shape as the input.

delay_array out_shape f x works in the same way as delay but is applied to an array of ndarrays. It needs the shape of the output as an argument.
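A hedged sketch of both functions follows. The exact signatures are an assumption here (a raw function over the underlying ndarray type, plus the output shape for delay_array); the module name G, the doubling function, and the 3 x 3 output shape are invented for the example.

(* Sketch only: assume a module [G] with this flattened signature is in scope,
   e.g. a CPU engine instantiated over Owl's single-precision dense ndarrays. *)
let doubled_lazily x =
  (* the delayed function is untracked by the graph and must preserve the shape *)
  G.delay (fun a -> Owl_dense_ndarray.S.map (fun e -> e *. 2.) a) x

let summed_lazily xs =
  (* the output shape must be supplied explicitly; 3 x 3 is assumed here *)
  G.delay_array [| 3; 3 |] (fun a -> Owl_dense_ndarray.S.add a.(0) a.(1)) xs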

val lazy_print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

lazy_print x prints the output of x when it is evaluated. It is implemented as an identity node. For information about the optional parameters, refer to the print function of the Ndarray module.
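Because lazy_print is an identity node, it can be spliced into an existing expression without changing the result. A small hedged sketch, with the same assumed module G as above and an invented helper name:

(* Sketch: print at most 10 rows/columns of [x] when the graph is evaluated,
   then keep computing with the identity result. *)
let traced_sum x y =
  let x' = G.lazy_print ~max_row:10 ~max_col:10 ~header:true x in
  G.add x' y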

val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit

TODO

val min : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val sum : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val sum_reduce : - ?axis:int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val log_sum_exp : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

TODO

val elt_less_equal_scalar : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.elt -> - Symbol.Shape.Type.arr

TODO

val elt_greater_equal_scalar : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.elt -> - Symbol.Shape.Type.arr

TODO

val conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max_pool1d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max_pool2d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max_pool3d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val avg_pool1d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val avg_pool2d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val avg_pool3d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val upsampling2d_backward : - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val row_num : Symbol.Shape.Type.arr -> int

TODO

val col_num : Symbol.Shape.Type.arr -> int

TODO

val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

TODO

val transpose : - ?axis:int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val to_rows : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val to_cols : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val of_array : - Symbol.Shape.Type.elt array -> - int array -> - Symbol.Shape.Type.arr

TODO

val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr

TODO

val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array

TODO

Scalar functions
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/index.html
deleted file mode 100644
index 50e76900c..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/Optimiser/index.html
+++ /dev/null
@@ -1,4 +0,0 @@

Optimiser (owl-base.Owl_computation_engine_sig.Flatten_Sig.Graph.Optimiser)

Module Graph.Optimiser

Core functions
val estimate_complexity : 'a Owl_graph.node array -> int * int

TODO

val optimise_nodes : - Operator.Symbol.Shape.Type.attr Owl_graph.node array -> - unit

TODO

\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/index.html
deleted file mode 100644
index 2c7bcb42a..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Graph/index.html
+++ /dev/null
@@ -1,33 +0,0 @@

Graph (owl-base.Owl_computation_engine_sig.Flatten_Sig.Graph)

Module Flatten_Sig.Graph

Type definition
type graph

TODO

Core functions
val shape_or_value : Optimiser.Operator.Symbol.Shape.Type.t -> string

TODO

val graph_to_dot : graph -> string

TODO

val graph_to_trace : graph -> string

TODO

val save_graph : 'a -> string -> unit

TODO

val load_graph : string -> 'a * 'b

TODO

val invalidate_rvs : graph -> unit

TODO

val is_iopair_safe : 'a Owl_graph.node -> 'a Owl_graph.node -> bool

TODO

val update_iopair : graph -> unit

TODO

val remove_unused_iopair : - 'a Owl_graph.node array -> - 'b array -> - 'a Owl_graph.node array * 'b array

TODO

val optimise : graph -> unit

TODO

\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Linalg/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Linalg/index.html
deleted file mode 100644
index 5222c4915..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Linalg/index.html
+++ /dev/null
@@ -1,33 +0,0 @@

Linalg (owl-base.Owl_computation_engine_sig.Flatten_Sig.Linalg)

Module Flatten_Sig.Linalg

\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Mat/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Mat/index.html
deleted file mode 100644
index 7873b8f88..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Mat/index.html
+++ /dev/null
@@ -1,2 +0,0 @@

Mat (owl-base.Owl_computation_engine_sig.Flatten_Sig.Mat)

Module Flatten_Sig.Mat

val eye : int -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Linalg/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Linalg/index.html
deleted file mode 100644
index a49b782ce..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Linalg/index.html
+++ /dev/null
@@ -1,33 +0,0 @@

Linalg (owl-base.Owl_computation_engine_sig.Flatten_Sig.Operator.Linalg)

Module Operator.Linalg

\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Mat/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Mat/index.html
deleted file mode 100644
index 48de2cf04..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Mat/index.html
+++ /dev/null
@@ -1,2 +0,0 @@

Mat (owl-base.Owl_computation_engine_sig.Flatten_Sig.Operator.Mat)

Module Operator.Mat

val eye : int -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Scalar/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Scalar/index.html
deleted file mode 100644
index 411266f18..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Scalar/index.html
+++ /dev/null
@@ -1,20 +0,0 @@

Scalar (owl-base.Owl_computation_engine_sig.Flatten_Sig.Operator.Scalar)

Module Operator.Scalar

\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html
deleted file mode 100644
index 0606dd140..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html
+++ /dev/null
@@ -1,6 +0,0 @@

Linalg (owl-base.Owl_computation_engine_sig.Flatten_Sig.Operator.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : - ?solver:[ `default | `bilinear | `direct ] -> - arr -> - arr -> - arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/A/Mat/index.html
deleted file mode 100644
index 87161e8e4..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/A/Mat/index.html
+++ /dev/null
@@ -1,2 +0,0 @@

Mat (owl-base.Owl_computation_engine_sig.Flatten_Sig.Operator.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html
deleted file mode 100644
index 0b022d4aa..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html
+++ /dev/null
@@ -1,2 +0,0 @@

Scalar (owl-base.Owl_computation_engine_sig.Flatten_Sig.Operator.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/A/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/A/index.html
deleted file mode 100644
index 45639ad90..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/A/index.html
+++ /dev/null
@@ -1,379 +0,0 @@

A (owl-base.Owl_computation_engine_sig.Flatten_Sig.Operator.Symbol.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(elt -> string) -> - arr -> - unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val dilated_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val transpose_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val max_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv1d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : - ?transa:bool -> - ?transb:bool -> - ?alpha:elt -> - ?beta:elt -> - c:arr -> - arr -> - arr -> - unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val conv2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val conv3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val dilated_conv1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - unit
val dilated_conv2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - unit
val dilated_conv3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - unit
val transpose_conv1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val transpose_conv2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val transpose_conv3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val max_pool1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val max_pool2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val max_pool3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val avg_pool1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val avg_pool2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val avg_pool3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv1d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv2d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv2d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv3d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv3d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val transpose_conv1d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv1d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv2d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv2d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv3d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv3d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val max_pool1d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val max_pool2d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val max_pool3d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val avg_pool1d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val avg_pool2d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val avg_pool3d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/index.html
deleted file mode 100644
index 6f75b920e..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/Device/index.html
+++ /dev/null
@@ -1,2 +0,0 @@

Device (owl-base.Owl_computation_engine_sig.Flatten_Sig.Operator.Symbol.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/index.html
deleted file mode 100644
index e6ebf0c18..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/Type/index.html
+++ /dev/null
@@ -1,5 +0,0 @@

Type (owl-base.Owl_computation_engine_sig.Flatten_Sig.Operator.Symbol.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

The block type keeps a reference to a block of memory and to the nodes sharing that block.
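Since these are plain record fields, a block's bookkeeping can be inspected directly. The helper below is a hypothetical illustration; only its name is invented, and it assumes Shape.Type is opened so that block is in scope.

(* Sketch: summarise a block's bookkeeping fields as defined above. *)
let describe_block (b : block) =
  Printf.printf "block %d: size %d, shared by %d node(s)%s\n"
    b.block_id b.size (List.length b.nodes)
    (match b.active with Some _ -> ", currently in use" | None -> "")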

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/index.html
deleted file mode 100644
index 6895163cb..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/Shape/index.html
+++ /dev/null
@@ -1,5 +0,0 @@

Shape (owl-base.Owl_computation_engine_sig.Flatten_Sig.Operator.Symbol.Shape)

Module Symbol.Shape

Core functions
val infer_shape : - Type.op -> - Type.attr Owl_graph.node array -> - int array option array

TODO

\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/index.html
deleted file mode 100644
index 5798d06e7..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/Symbol/index.html
+++ /dev/null
@@ -1,28 +0,0 @@

Symbol (owl-base.Owl_computation_engine_sig.Flatten_Sig.Operator.Symbol)

Module Operator.Symbol

Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : - ?name:string -> - ?value:Shape.Type.Device.value array -> - ?shape:int array option array -> - ?freeze:bool -> - ?reuse:bool -> - ?state:Shape.Type.state -> - Shape.Type.op -> - Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : - ?shape:int array option array -> - Shape.Type.op -> - Shape.Type.attr Owl_graph.node array -> - Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.

val make_value_block : - Shape.Type.Device.value -> - Shape.Type.attr Owl_graph.node -> - unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.

val add_node_to_block : - Shape.Type.attr Owl_graph.node -> - Shape.Type.block -> - unit

Links a node to a reusable block and initialises the node's memory within the memory of that block.

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Return the node that is currently using the memory of the block.

val set_active_node : - Shape.Type.block -> - Shape.Type.attr Owl_graph.node -> - unit

Update the node that is currently using the block of memory.

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.

val set_value : - Shape.Type.attr Owl_graph.node -> - Shape.Type.Device.value array -> - unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : - Shape.Type.attr Owl_graph.node -> - Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO

\ No newline at end of file
diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/index.html
deleted file mode 100644
index 1a2653ad8..000000000
--- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Operator/index.html
+++ /dev/null
@@ -1,420 +0,0 @@

Operator (owl-base.Owl_computation_engine_sig.Flatten_Sig.Operator)

Module Flatten_Sig.Operator

Vectorised functions
val empty : int array -> Symbol.Shape.Type.arr

TODO

val zeros : int array -> Symbol.Shape.Type.arr

TODO

val ones : int array -> Symbol.Shape.Type.arr

TODO

val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val sequential : - ?a:Symbol.Shape.Type.elt -> - ?step:Symbol.Shape.Type.elt -> - int array -> - Symbol.Shape.Type.arr

TODO

val uniform : - ?a:Symbol.Shape.Type.elt -> - ?b:Symbol.Shape.Type.elt -> - int array -> - Symbol.Shape.Type.arr

TODO

val gaussian : - ?mu:Symbol.Shape.Type.elt -> - ?sigma:Symbol.Shape.Type.elt -> - int array -> - Symbol.Shape.Type.arr

TODO

val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val init_nd : - int array -> - (int array -> Symbol.Shape.Type.elt) -> - Symbol.Shape.Type.arr

TODO

val shape : Symbol.Shape.Type.arr -> int array

TODO

val numel : Symbol.Shape.Type.arr -> int

TODO

TODO

val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit

TODO

val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val set_slice : - int list list -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - unit

TODO

TODO

val set_fancy : - Owl_types.index list -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - unit

TODO

val copy_ : out:'a -> 'b -> 'c

TODO

val reset : Symbol.Shape.Type.arr -> unit

TODO

val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val pad : - ?v:Symbol.Shape.Type.elt -> - int list list -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr

TODO

val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val concatenate : - ?axis:int -> - Symbol.Shape.Type.arr array -> - Symbol.Shape.Type.arr

TODO

val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

TODO

val split : ?axis:int -> 'a -> 'b -> 'c

TODO

val draw : - ?axis:int -> - Symbol.Shape.Type.arr -> - int -> - Symbol.Shape.Type.arr * 'a array

TODO

TODO

delay f x returns f x. It makes it possible to use a function that is not tracked by the computation graph and to delay its evaluation. The output must have the same shape as the input.

delay_array out_shape f x works in the same way as delay but is applied to an array of ndarrays. It needs the shape of the output as an argument.

val lazy_print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

lazy_print x prints the output of x when it is evaluated. It is implemented as an identity node. For information about the optional parameters, refer to the print function of the Ndarray module.

val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit

TODO

val min : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val sum : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val sum_reduce : - ?axis:int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val log_sum_exp : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

TODO

val elt_less_equal_scalar : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.elt -> - Symbol.Shape.Type.arr

TODO

val elt_greater_equal_scalar : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.elt -> - Symbol.Shape.Type.arr

TODO

val conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val upsampling2d_backward : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val row_num : Symbol.Shape.Type.arr -> int

TODO

val col_num : Symbol.Shape.Type.arr -> int

TODO

val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

TODO

val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val to_rows : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val to_cols : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr

TODO

val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr

TODO

val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array

TODO

Scalar functions
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Linalg/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Linalg/index.html deleted file mode 100644 index 7bb150a82..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Linalg/index.html +++ /dev/null @@ -1,33 +0,0 @@ - -Linalg (owl-base.Owl_computation_engine_sig.Flatten_Sig.Optimiser.Operator.Linalg)

Module Operator.Linalg

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Mat/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Mat/index.html deleted file mode 100644 index 7f4dd56e0..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_engine_sig.Flatten_Sig.Optimiser.Operator.Mat)

Module Operator.Mat

val eye : int -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Scalar/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Scalar/index.html deleted file mode 100644 index 17d9e77a2..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Scalar/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -Scalar (owl-base.Owl_computation_engine_sig.Flatten_Sig.Optimiser.Operator.Scalar)

Module Operator.Scalar

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html deleted file mode 100644 index bfce37c0c..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_computation_engine_sig.Flatten_Sig.Optimiser.Operator.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html deleted file mode 100644 index 6ad560d9c..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_engine_sig.Flatten_Sig.Optimiser.Operator.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html deleted file mode 100644 index 286a06013..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_computation_engine_sig.Flatten_Sig.Optimiser.Operator.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html deleted file mode 100644 index 767d6e342..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_computation_engine_sig.Flatten_Sig.Optimiser.Operator.Symbol.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/index.html deleted file mode 100644 index ea8f7e883..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Device (owl-base.Owl_computation_engine_sig.Flatten_Sig.Optimiser.Operator.Symbol.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/index.html deleted file mode 100644 index 515c833be..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/Type/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Type (owl-base.Owl_computation_engine_sig.Flatten_Sig.Optimiser.Operator.Symbol.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

block type keeps a reference to a block of memory and to the nodes sharing that block.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/index.html deleted file mode 100644 index e47b18820..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/Shape/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Shape (owl-base.Owl_computation_engine_sig.Flatten_Sig.Optimiser.Operator.Symbol.Shape)

Module Symbol.Shape

Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/index.html deleted file mode 100644 index 3c31c7c3a..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/Symbol/index.html +++ /dev/null @@ -1,28 +0,0 @@ - -Symbol (owl-base.Owl_computation_engine_sig.Flatten_Sig.Optimiser.Operator.Symbol)

Module Operator.Symbol

Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.

val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.

val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit

Link a node to a reusable block and initialise the node's memory inside the block's memory.

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Return the node that is currently using the memory of the block.

val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit

Update the node that is currently using the block of memory.

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.
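A small sketch showing how the block helpers above fit together; it uses only the signatures documented in this module, but the workflow itself is an illustration, not the library's prescribed usage:

  (* allocate a fresh reusable block sized for the node, then attach the node *)
  let blk = make_empty_block ~block_id:(new_block_id ()) (node_numel node) in
  add_node_to_block node blk;
  assert (get_block_id node <> -1);
  (* the block remembers which node currently owns its memory *)
  (match get_active_node blk with
   | Some owner -> set_active_node blk owner
   | None -> ())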

val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.
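For instance, a hedged illustration of the invariant this implies (sketch only):

  (* every node sharing the block reports the same block id *)
  let siblings = get_shared_nodes node in
  Array.iter (fun n -> assert (get_block_id n = get_block_id node)) siblings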

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/index.html deleted file mode 100644 index 8937937bd..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/Operator/index.html +++ /dev/null @@ -1,420 +0,0 @@ - -Operator (owl-base.Owl_computation_engine_sig.Flatten_Sig.Optimiser.Operator)

Module Optimiser.Operator

Vectorised functions
val empty : int array -> Symbol.Shape.Type.arr

TODO

val zeros : int array -> Symbol.Shape.Type.arr

TODO

val ones : int array -> Symbol.Shape.Type.arr

TODO

val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val sequential : ?a:Symbol.Shape.Type.elt -> ?step:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val uniform : ?a:Symbol.Shape.Type.elt -> ?b:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val gaussian : ?mu:Symbol.Shape.Type.elt -> ?sigma:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val init_nd : int array -> (int array -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val shape : Symbol.Shape.Type.arr -> int array

TODO

val numel : Symbol.Shape.Type.arr -> int

TODO

TODO

val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit

TODO

val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val set_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

TODO

val set_fancy : Owl_types.index list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

val copy_ : out:'a -> 'b -> 'c

TODO

val reset : Symbol.Shape.Type.arr -> unit

TODO

val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val pad : ?v:Symbol.Shape.Type.elt -> int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr

TODO

val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val concatenate : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

TODO

val split : ?axis:int -> 'a -> 'b -> 'c

TODO

val draw : ?axis:int -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr * 'a array

TODO

TODO

delay f x returns f x. It allows one to use a function that is not tracked by the computation graph and to delay its evaluation. The output must have the same shape as the input.

delay_array out_shape f x works in the same way as delay but is applied to an array of ndarrays. It needs the shape of the output as an argument.

val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

lazy_print x prints the output of x when it is evaluated. It is implemented as an identity node. For information about the optional parameters, refer to the print function of the Ndarray module.

val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit

TODO

val min : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum_reduce : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

TODO

val elt_less_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val elt_greater_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val upsampling2d_backward : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val row_num : Symbol.Shape.Type.arr -> int

TODO

val col_num : Symbol.Shape.Type.arr -> int

TODO

val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

TODO

val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val to_rows : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val to_cols : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr

TODO

val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr

TODO

val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array

TODO

Scalar functions
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/index.html deleted file mode 100644 index ca12ce169..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Optimiser/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Optimiser (owl-base.Owl_computation_engine_sig.Flatten_Sig.Optimiser)

Module Flatten_Sig.Optimiser

Core functions
val estimate_complexity : 'a Owl_graph.node array -> int * int

TODO

val optimise_nodes : Operator.Symbol.Shape.Type.attr Owl_graph.node array -> unit

TODO
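A hedged sketch of how an optimiser pass might be driven, assuming owl-base's Owl_graph is available; Opt abstracts the two functions above, attr stands in for the concrete attribute type, and the meaning of the two integers returned by estimate_complexity is not documented here.

module Pass (Opt : sig
  type attr
  val estimate_complexity : 'a Owl_graph.node array -> int * int
  val optimise_nodes : attr Owl_graph.node array -> unit
end) = struct
  (* optimise the graph in place, then report the change in the first
     complexity estimate *)
  let run (outputs : Opt.attr Owl_graph.node array) =
    let c0, _ = Opt.estimate_complexity outputs in
    Opt.optimise_nodes outputs;
    let c1, _ = Opt.estimate_complexity outputs in
    Printf.printf "complexity estimate: %d -> %d\n" c0 c1
end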

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Scalar/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Scalar/index.html deleted file mode 100644 index 4bd744263..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Scalar/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -Scalar (owl-base.Owl_computation_engine_sig.Flatten_Sig.Scalar)

Module Flatten_Sig.Scalar

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/A/Linalg/index.html deleted file mode 100644 index 8e75bfea7..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_computation_engine_sig.Flatten_Sig.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
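A sketch of solving a linear system against the Linalg subset above. The Cholesky route assumes ~trans transposes the coefficient matrix and ~typ:`u marks it upper triangular; that reading of the labelled arguments is an assumption, not stated in this page.

module Solve (L : sig
  type arr
  val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
  val chol : ?upper:bool -> arr -> arr
end) = struct
  (* general solve of a x = b *)
  let solve a b = L.linsolve a b

  (* when a is symmetric positive definite, factor a = U^T U once and
     reuse the triangular factor: solve U^T y = b, then U x = y *)
  let solve_spd a b =
    let u = L.chol ~upper:true a in
    L.linsolve ~typ:`u u (L.linsolve ~trans:true ~typ:`u u b)
end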
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/A/Mat/index.html deleted file mode 100644 index 01576aaa8..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_engine_sig.Flatten_Sig.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
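A small sketch over the Mat signature above; the functor name and the check itself are illustrative.

module Triangles (M : sig
  type arr
  val eye : int -> arr
  val triu : ?k:int -> arr -> arr
  val tril : ?k:int -> arr -> arr
end) = struct
  (* the identity matrix is both upper and lower triangular, so both
     extractions leave it unchanged *)
  let check n =
    let i = M.eye n in
    (M.triu i, M.tril i)
end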
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/A/Scalar/index.html deleted file mode 100644 index a36dbb512..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_computation_engine_sig.Flatten_Sig.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
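A sketch composing the scalar primitives above; sigmoid is also provided directly, so writing it by hand is only for illustration, and the constant one must be passed in because this signature has no float-to-elt conversion.

module Activations (S : sig
  type elt
  val neg : elt -> elt
  val exp : elt -> elt
  val add : elt -> elt -> elt
  val div : elt -> elt -> elt
end) = struct
  (* sigmoid x = one / (one + exp (-x)) *)
  let sigmoid_manual ~one x = S.div one (S.add one (S.exp (S.neg x)))
end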
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/A/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/A/index.html deleted file mode 100644 index 252847b07..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_computation_engine_sig.Flatten_Sig.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
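A minimal sketch of the creation and arithmetic functions above, written as a functor over only the vals it uses; the fixed 3x3 shape and the scale factor are illustrative.

module Axpy (N : sig
  type arr
  type elt
  val create : int array -> elt -> arr
  val sequential : ?a:elt -> ?step:elt -> int array -> arr
  val mul_scalar : arr -> elt -> arr
  val add : arr -> arr -> arr
  val sum' : arr -> elt
  val float_to_elt : float -> elt
end) = struct
  (* z = 2 * x + y on fresh arrays, then reduce to a single scalar *)
  let demo () =
    let x = N.sequential [| 3; 3 |] in
    let y = N.create [| 3; 3 |] (N.float_to_elt 1.) in
    let z = N.add (N.mul_scalar x (N.float_to_elt 2.)) y in
    N.sum' z
end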
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
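A hedged sketch chaining a convolution and a pooling step from the signatures above. The SAME padding constructor comes from Owl_types_common; the stride and window sizes, and the stated input/kernel layout, are assumptions about Owl's usual 1-D convolution convention rather than facts from this page.

module ConvBlock (N : sig
  type arr
  val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
  val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
end) = struct
  (* one stride-1 convolution with SAME padding, followed by 2-wide,
     stride-2 max pooling; input assumed [|batch; length; in_channels|],
     kernel assumed [|kernel_width; in_channels; out_channels|] *)
  let forward input kernel =
    let y = N.conv1d ~padding:Owl_types_common.SAME input kernel [| 1 |] in
    N.max_pool1d y [| 2 |] [| 2 |]
end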
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
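A sketch of the element-wise comparisons above. The assumption that the result is an array of 0/1 elements (as in Owl's dense backends) is noted in the comment; it is not stated by the signature itself.

module Mask (N : sig
  type arr
  type elt
  val elt_greater_scalar : arr -> elt -> arr
  val mul : arr -> arr -> arr
end) = struct
  (* the comparison yields a 0/1-valued mask, so multiplying by it zeroes
     every element at or below the threshold *)
  let keep_above threshold x = N.mul x (N.elt_greater_scalar x threshold)
end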
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
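A sketch of the in-place (underscore-suffixed) functions above: out names the destination buffer, so a whole pipeline can run without allocating intermediates. The shift by 1.0 and the uniform range are illustrative.

module InPlace (N : sig
  type arr
  type elt
  val zeros : int array -> arr
  val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
  val add_scalar_ : ?out:arr -> arr -> elt -> unit
  val float_to_elt : float -> elt
end) = struct
  (* fill a preallocated buffer with uniform noise in [0, 1), then shift
     it by 1.0, reusing the same buffer for both steps *)
  let noisy_ones shape =
    let buf = N.zeros shape in
    N.uniform_ ~a:(N.float_to_elt 0.) ~b:(N.float_to_elt 1.) ~out:buf;
    N.add_scalar_ ~out:buf buf (N.float_to_elt 1.);
    buf
end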
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/index.html deleted file mode 100644 index 69a6ed27c..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/Device/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Device (owl-base.Owl_computation_engine_sig.Flatten_Sig.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO
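A sketch of how the conversion predicates and accessors above fit together: a value wraps either an ndarray or a scalar, so code that receives one can dispatch on is_arr. The functor name and polymorphic-variant tags are illustrative.

module Unwrap (D : sig
  module A : sig
    type arr
    type elt
  end
  type value
  val value_to_arr : value -> A.arr
  val value_to_elt : value -> A.elt
  val is_arr : value -> bool
end) = struct
  (* dispatch on the runtime kind of the device value *)
  let unpack v =
    if D.is_arr v then `Arr (D.value_to_arr v) else `Elt (D.value_to_elt v)
end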

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/index.html deleted file mode 100644 index b26e035e2..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/Type/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Type (owl-base.Owl_computation_engine_sig.Flatten_Sig.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

The block type keeps a reference to a block of memory and to the nodes sharing that block.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
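Since arr and elt above are single-constructor wrappers around graph nodes, unwrapping them is one pattern match. A minimal sketch over just that subset of the type definition (the functor name and the abstract t are illustrative):

module Unpack (T : sig
  type t
  type arr = Arr of t
  type elt = Elt of t
end) = struct
  (* recover the underlying graph node from either wrapper *)
  let node_of_arr (T.Arr node) = node
  let node_of_elt (T.Elt node) = node
end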
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/index.html deleted file mode 100644 index 08124402b..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Shape/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Shape (owl-base.Owl_computation_engine_sig.Flatten_Sig.Shape)

Module Flatten_Sig.Shape

Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO
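A hedged sketch of consuming infer_shape: each output shape is an option, so a quick check is whether every one could be inferred from the parents. Assumes owl-base's Owl_graph and OCaml's standard Array/Option modules; op and attr abstract the concrete Type definitions.

module Check (S : sig
  type op
  type attr
  val infer_shape : op -> attr Owl_graph.node array -> int array option array
end) = struct
  (* true when the shape of every output of the operator is known *)
  let fully_known operator parents =
    Array.for_all Option.is_some (S.infer_shape operator parents)
end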

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/A/Linalg/index.html deleted file mode 100644 index 80bee6e74..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_computation_engine_sig.Flatten_Sig.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/A/Mat/index.html deleted file mode 100644 index 2cfc2db7a..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_engine_sig.Flatten_Sig.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/A/Scalar/index.html deleted file mode 100644 index 8ebcf6e14..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_computation_engine_sig.Flatten_Sig.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/A/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/A/index.html deleted file mode 100644 index 9fd7a1ec0..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_computation_engine_sig.Flatten_Sig.Symbol.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
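A sketch using the axis-aware reductions above for row-wise normalisation. It assumes the backend broadcasts binary operations (as Owl's dense backends do), which is why keep_dims:true is used to retain the reduced axis.

module Normalise (N : sig
  type arr
  val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
  val div : arr -> arr -> arr
end) = struct
  (* divide each row of a matrix-shaped array by its own L2 norm; keeping
     the reduced axis makes the norm broadcast against the rows *)
  let rows x = N.div x (N.l2norm ~axis:1 ~keep_dims:true x)
end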
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
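A hedged sketch of the gemm-style dot_ above. Reading the labelled arguments as c := alpha * op(a) * op(b) + beta * c is an assumption about the convention, not something stated on this page; the m and n parameters are supplied by the caller because shapes are not computed here.

module Gemm (N : sig
  type arr
  type elt
  val zeros : int array -> arr
  val dot_ :
    ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt ->
    c:arr -> arr -> arr -> unit
end) = struct
  (* accumulate transpose(a) * b into a freshly zeroed m-by-n buffer *)
  let atb ~m ~n a b =
    let c = N.zeros [| m; n |] in
    N.dot_ ~transa:true ~c a b;
    c
end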
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/index.html deleted file mode 100644 index 91adaa7af..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/Device/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Device (owl-base.Owl_computation_engine_sig.Flatten_Sig.Symbol.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/index.html deleted file mode 100644 index ee97d017a..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/Type/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Type (owl-base.Owl_computation_engine_sig.Flatten_Sig.Symbol.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

The block type keeps a reference to a block of memory and to the nodes sharing that block.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/index.html deleted file mode 100644 index 2a7a20f3a..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/Shape/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Shape (owl-base.Owl_computation_engine_sig.Flatten_Sig.Symbol.Shape)

Module Symbol.Shape

Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/index.html deleted file mode 100644 index b1f334d5c..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Symbol/index.html +++ /dev/null @@ -1,28 +0,0 @@ - -Symbol (owl-base.Owl_computation_engine_sig.Flatten_Sig.Symbol)

Module Flatten_Sig.Symbol

Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO
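The constructors above declare the symbolic inputs of a graph: var_arr/var_elt create placeholders whose values are bound later (for instance through assign_arr, documented further below), while const_arr/const_elt wrap a concrete value once and for all. A minimal, hypothetical sketch, assuming a module S that implements this Symbol signature and whose Shape.Type.Device.A is a dense ndarray module:

(* Hypothetical sketch only: S is an assumed instantiation of this signature. *)
let make_inputs () =
  (* symbolic 3x3 input whose value is supplied later *)
  let x = S.var_arr ~shape:[| 3; 3 |] "x" in
  (* constant node wrapping a concrete ndarray *)
  let c = S.const_arr "c" (S.Shape.Type.Device.A.ones [| 3; 3 |]) in
  (* bind a concrete value to the variable before evaluation *)
  S.assign_arr x (S.Shape.Type.Device.A.zeros [| 3; 3 |]);
  (x, c)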

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.

val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.

val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit

Links a node to a reusable block and initialises the node's memory within the block's memory.

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Return the node that is currently using the memory of the block.

val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit

Update the node that is currently using the block of memory.

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.

val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.
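Taken together, these functions implement the shared-memory bookkeeping of the computation graph. The sketch below shows one plausible sequence of calls; it assumes a module S implementing this signature, and the choice of block size is only illustrative:

(* Hypothetical sketch: allocate a block and attach a node to it. *)
let attach_to_fresh_block node =
  let blk = S.make_empty_block ~block_id:(S.new_block_id ()) (S.node_numel node) in
  S.add_node_to_block node blk;
  (* the node now reports the id of the block it was assigned to *)
  assert (S.get_block_id node >= 0);
  (* all nodes attached to the same block are reported as sharing memory *)
  S.get_shared_nodes node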

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/A/Linalg/index.html deleted file mode 100644 index 64b46743f..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_computation_engine_sig.Flatten_Sig.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
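These are the dense linear-algebra routines the computation engine expects from the underlying ndarray module. As a rough usage sketch, assuming A is any module satisfying this signature (the function name solve_and_check is illustrative):

(* Hypothetical sketch: solve a * x = b and return the residual norm. *)
let solve_and_check (a : A.arr) (b : A.arr) =
  let x = A.Linalg.linsolve a b in
  let r = A.sub b (A.dot a x) in   (* residual b - a * x *)
  A.l2norm' r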
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/A/Mat/index.html deleted file mode 100644 index 0f22eff58..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_engine_sig.Flatten_Sig.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
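A brief illustrative use of these helpers, assuming A is a module exposing this Mat submodule together with the usual array constructors:

(* Hypothetical sketch: strictly upper-triangular matrix of ones. *)
let strict_upper n = A.Mat.triu ~k:1 (A.ones [| n; n |])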
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/A/Scalar/index.html deleted file mode 100644 index dc2ddb3ea..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_computation_engine_sig.Flatten_Sig.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
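These element-level functions mirror the vectorised ones and operate on single elt values. A small illustrative combination, assuming A is a module exposing this Scalar submodule together with float_to_elt:

(* Hypothetical sketch: softplus on a scalar, log (1 + exp x). *)
let softplus x = A.Scalar.log (A.Scalar.add (A.float_to_elt 1.) (A.Scalar.exp x))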
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/A/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/A/index.html deleted file mode 100644 index e1b989ac5..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_computation_engine_sig.Flatten_Sig.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/index.html deleted file mode 100644 index 5d61ce154..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/Device/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Device (owl-base.Owl_computation_engine_sig.Flatten_Sig.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO
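The value type is a container that holds either a full ndarray or a single element; is_arr/is_elt discriminate between the two and the conversion functions unwrap the payload. A minimal sketch, assuming Device is a module satisfying this signature:

(* Hypothetical sketch: unwrap a value into its ndarray or element payload. *)
let unwrap (v : Device.value) =
  if Device.is_arr v then `Arr (Device.value_to_arr v)
  else `Elt (Device.value_to_elt v)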

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/index.html deleted file mode 100644 index f18196fd0..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/Type/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Type (owl-base.Owl_computation_engine_sig.Flatten_Sig.Type)

Module Flatten_Sig.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

The block type keeps a reference to a block of memory and to the nodes sharing that block.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * int -> elt
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of elt -> elt
  31. | Fold of int * elt -> elt -> elt
  32. | Scan of int * elt -> elt -> elt
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of Device.A.arr -> Device.A.arr
  36. | DelayArray of int array * Device.A.arr array -> Device.A.arr
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/index.html b/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/index.html deleted file mode 100644 index 110ae4755..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Flatten_Sig/index.html +++ /dev/null @@ -1,485 +0,0 @@ - -Flatten_Sig (owl-base.Owl_computation_engine_sig.Flatten_Sig)

Module type Owl_computation_engine_sig.Flatten_Sig

include Owl_types_computation_engine.Sig
Core evaluation functions of the engine

TODO

TODO

val eval_graph : Graph.graph -> unit

TODO

include Owl_computation_graph_sig.Sig
Type definition
type graph

TODO

Core functions
val shape_or_value : Optimiser.Operator.Symbol.Shape.Type.t -> string

TODO

val graph_to_dot : graph -> string

TODO

val graph_to_trace : graph -> string

TODO

val save_graph : 'a -> string -> unit

TODO

val load_graph : string -> 'a * 'b

TODO

val invalidate_rvs : graph -> unit

TODO

val is_iopair_safe : 'a Owl_graph.node -> 'a Owl_graph.node -> bool

TODO

val update_iopair : graph -> unit

TODO

val remove_unused_iopair : 'a Owl_graph.node array -> 'b array -> 'a Owl_graph.node array * 'b array

TODO

val optimise : graph -> unit

TODO
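As a rough illustration of how these entry points fit together, the sketch below optimises a graph, evaluates it, and writes a Graphviz dump; CGraph is an assumed instantiation of this signature and the file name is arbitrary:

(* Hypothetical sketch only. *)
let run_and_dump g =
  CGraph.optimise g;                 (* simplify the graph first *)
  CGraph.eval_graph g;               (* compute the value of every node *)
  let oc = open_out "graph.dot" in
  output_string oc (CGraph.graph_to_dot g);
  close_out oc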

include Owl_computation_optimiser_sig.Sig
Core functions
val estimate_complexity : 'a Owl_graph.node array -> int * int

TODO

val optimise_nodes : Operator.Symbol.Shape.Type.attr Owl_graph.node array -> unit

TODO

include Owl_computation_operator_sig.Sig
Vectorised functions
val empty : int array -> Symbol.Shape.Type.arr

TODO

val zeros : int array -> Symbol.Shape.Type.arr

TODO

val ones : int array -> Symbol.Shape.Type.arr

TODO

val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val sequential : ?a:Symbol.Shape.Type.elt -> ?step:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val uniform : ?a:Symbol.Shape.Type.elt -> ?b:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val gaussian : ?mu:Symbol.Shape.Type.elt -> ?sigma:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val init_nd : int array -> (int array -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val shape : Symbol.Shape.Type.arr -> int array

TODO

val numel : Symbol.Shape.Type.arr -> int

TODO
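These constructors build symbolic nodes rather than computing values immediately; the shape of a node is nevertheless available without evaluating it. A hypothetical sketch, assuming CGraph instantiates this signature:

(* Hypothetical sketch: shape and numel are known before evaluation. *)
let make_weights () =
  let w = CGraph.gaussian [| 784; 10 |] in
  assert (CGraph.shape w = [| 784; 10 |]);
  assert (CGraph.numel w = 7840);
  w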

TODO

val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit

TODO

val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val set_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

TODO

val set_fancy : Owl_types.index list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

val copy_ : out:'a -> 'b -> 'c

TODO

val reset : Symbol.Shape.Type.arr -> unit

TODO

val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val pad : ?v:Symbol.Shape.Type.elt -> int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr

TODO

val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val concatenate : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

TODO

val split : ?axis:int -> 'a -> 'b -> 'c

TODO

val draw : ?axis:int -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr * 'a array

TODO

TODO

delay f x returns f x. It makes it possible to use a function that is not tracked by the computation graph and to delay its evaluation. The output must have the same shape as the input.

delay_array out_shape f x works in the same way as delay but applies to an array of ndarrays. The shape of the output must be supplied as an argument.

val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

lazy_print x prints the output of x when it is evaluated. It is implemented as an identity node. For information about the optional parameters, refer to the print function of the Ndarray module.
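Combining the two, a hypothetical sketch in which CGraph is an assumed instantiation of this signature and Dense.sqr stands for any untracked ndarray function of matching type:

(* Hypothetical sketch: apply an untracked function, then print lazily. *)
let trace_square x =
  let y = CGraph.delay Dense.sqr x in   (* output shape equals input shape *)
  CGraph.lazy_print ~header:true y      (* identity node that prints y once evaluated *)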

val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit

TODO

val min : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum_reduce : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

TODO

val elt_less_equal_scalar : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.elt -> - Symbol.Shape.Type.arr

TODO

val elt_greater_equal_scalar : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.elt -> - Symbol.Shape.Type.arr

TODO

val conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max_pool1d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max_pool2d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max_pool3d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val avg_pool1d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val avg_pool2d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val avg_pool3d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val upsampling2d_backward : - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val row_num : Symbol.Shape.Type.arr -> int

TODO

val col_num : Symbol.Shape.Type.arr -> int

TODO

val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

TODO

val transpose : - ?axis:int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val to_rows : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val to_cols : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val of_array : - Symbol.Shape.Type.elt array -> - int array -> - Symbol.Shape.Type.arr

TODO

val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr

TODO

val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array

TODO

Scalar functions
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
include Owl_computation_symbol_sig.Sig
Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.

val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.

val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit

Links a node to a reusable block and initialises the node's memory within the block's memory.

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Return the node that is currently using the memory of the block.

val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit

Update the node that is currently using the block of memory.

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.
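A short sketch of the active-node bookkeeping, assuming S implements this signature (the function name activate is illustrative):

(* Hypothetical sketch: make node the active user of blk and check the block id. *)
let activate node blk =
  S.set_active_node blk node;
  match S.get_active_node blk with
  | Some n -> S.get_block_id n = S.get_block_id node
  | None -> false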

val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO

include Owl_computation_shape_sig.Sig
Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO

include Owl_computation_type_sig.Sig
Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

The block type keeps a reference to a block of memory and to the nodes sharing that block.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
include Owl_types_computation_device.Sig
Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Linalg/index.html b/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Linalg/index.html deleted file mode 100644 index 0a7bea658..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Linalg/index.html +++ /dev/null @@ -1,33 +0,0 @@ - -Linalg (owl-base.Owl_computation_engine_sig.Make_Graph_Sig.Optimiser.Operator.Linalg)

Module Operator.Linalg

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Mat/index.html b/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Mat/index.html deleted file mode 100644 index 14a11091c..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_engine_sig.Make_Graph_Sig.Optimiser.Operator.Mat)

Module Operator.Mat

val eye : int -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Scalar/index.html b/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Scalar/index.html deleted file mode 100644 index ed2140ac5..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Scalar/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -Scalar (owl-base.Owl_computation_engine_sig.Make_Graph_Sig.Optimiser.Operator.Scalar)

Module Operator.Scalar

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html deleted file mode 100644 index 28f4c1ba2..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_computation_engine_sig.Make_Graph_Sig.Optimiser.Operator.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html deleted file mode 100644 index bf58e592c..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_engine_sig.Make_Graph_Sig.Optimiser.Operator.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html deleted file mode 100644 index 97d0c4fda..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_computation_engine_sig.Make_Graph_Sig.Optimiser.Operator.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html b/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html deleted file mode 100644 index e43300f97..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_computation_engine_sig.Make_Graph_Sig.Optimiser.Operator.Symbol.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/index.html b/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/index.html deleted file mode 100644 index f0770fde8..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/Device/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Device (owl-base.Owl_computation_engine_sig.Make_Graph_Sig.Optimiser.Operator.Symbol.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/index.html b/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/index.html deleted file mode 100644 index f53acc297..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/Type/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Type (owl-base.Owl_computation_engine_sig.Make_Graph_Sig.Optimiser.Operator.Symbol.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

The block type keeps a reference to a block of memory and to the nodes sharing that block.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/index.html b/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/index.html deleted file mode 100644 index cbb1660af..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/Shape/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Shape (owl-base.Owl_computation_engine_sig.Make_Graph_Sig.Optimiser.Operator.Symbol.Shape)

Module Symbol.Shape

Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/index.html b/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/index.html deleted file mode 100644 index 5c73606f1..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/Symbol/index.html +++ /dev/null @@ -1,28 +0,0 @@ - -Symbol (owl-base.Owl_computation_engine_sig.Make_Graph_Sig.Optimiser.Operator.Symbol)

Module Operator.Symbol

Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.
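A hedged sketch of combining it with new_block_id above; Symbol is an assumed instantiation of this signature:

(* Sketch only: [Symbol] is an assumption. *)
let fresh_block size =
  Symbol.make_empty_block ~block_id:(Symbol.new_block_id ()) size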

val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.

val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit

Links a node to a reusable block and initialises the node's memory within the memory of the block.
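A hedged OCaml sketch of letting two nodes share one freshly allocated block; Symbol, n1 and n2 are assumptions, and node_numel is the function declared earlier in this module:

(* Sketch only: [Symbol], [n1] and [n2] are assumptions. *)
let share_block n1 n2 =
  let blk = Symbol.make_empty_block (Symbol.node_numel n1) in
  Symbol.add_node_to_block n1 blk;
  Symbol.add_node_to_block n2 blk;
  (* both nodes are now reported by get_shared_nodes *)
  Symbol.get_shared_nodes n1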

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Returns the node that is currently using the memory of the block.

val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit

Updates the node that is currently using the block of memory.

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.

val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/index.html b/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/index.html deleted file mode 100644 index 22502f059..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/Operator/index.html +++ /dev/null @@ -1,420 +0,0 @@ - -Operator (owl-base.Owl_computation_engine_sig.Make_Graph_Sig.Optimiser.Operator)

Module Optimiser.Operator

Vectorised functions
val empty : int array -> Symbol.Shape.Type.arr

TODO

val zeros : int array -> Symbol.Shape.Type.arr

TODO

val ones : int array -> Symbol.Shape.Type.arr

TODO

val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val sequential : ?a:Symbol.Shape.Type.elt -> ?step:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val uniform : ?a:Symbol.Shape.Type.elt -> ?b:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val gaussian : ?mu:Symbol.Shape.Type.elt -> ?sigma:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val init_nd : int array -> (int array -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val shape : Symbol.Shape.Type.arr -> int array

TODO

val numel : Symbol.Shape.Type.arr -> int

TODO

TODO

val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit

TODO

val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val set_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

TODO

val set_fancy : Owl_types.index list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

val copy_ : out:'a -> 'b -> 'c

TODO

val reset : Symbol.Shape.Type.arr -> unit

TODO

val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val pad : ?v:Symbol.Shape.Type.elt -> int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr

TODO

val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val concatenate : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

TODO

val split : ?axis:int -> 'a -> 'b -> 'c

TODO

val draw : ?axis:int -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr * 'a array

TODO

TODO

delay f x returns f x. It makes it possible to use a function that is not tracked by the computation graph and to delay its evaluation. The output must have the same shape as the input.

delay_array out_shape f x works in the same way as delay but applies to an array of ndarrays. The shape of the output must be given as an argument.
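A hedged sketch of both functions, assuming the signatures delay : (A.arr -> A.arr) -> arr -> arr and delay_array : int array -> (A.arr array -> A.arr) -> arr array -> arr suggested by the descriptions; Op, the underlying ndarray module Dense, and the values x and y are assumptions:

(* Sketch only: [Op], [Dense], [x] and [y] are assumptions. *)
let doubled x =
  (* the mapped function is evaluated lazily and is invisible to graph optimisation *)
  Op.delay (fun a -> Dense.mul_scalar a 2.) x

let summed x y =
  (* the output shape must be given explicitly *)
  Op.delay_array [| 3; 3 |] (fun a -> Dense.add a.(0) a.(1)) [| x; y |]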

val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

lazy_print x prints the output of x when it is evaluated. It is implemented as an identity node. For information about the optional parameters, refer to the print function of the Ndarray module.
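Because it behaves as an identity node, lazy_print can be chained inline; a hedged sketch where Op and x are assumptions:

(* Sketch only: [Op] and [x] are assumptions. *)
let y = Op.lazy_print ~max_row:10 ~header:true x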

val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit

TODO

val min : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum_reduce : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

TODO

val elt_less_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val elt_greater_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max_pool1d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max_pool2d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max_pool3d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val avg_pool1d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val avg_pool2d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val avg_pool3d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val upsampling2d_backward : - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val row_num : Symbol.Shape.Type.arr -> int

TODO

val col_num : Symbol.Shape.Type.arr -> int

TODO

val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

TODO

val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val to_rows : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val to_cols : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr

TODO

val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr

TODO

val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array

TODO

Scalar functions
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/index.html b/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/index.html deleted file mode 100644 index 207be9118..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/Optimiser/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Optimiser (owl-base.Owl_computation_engine_sig.Make_Graph_Sig.Optimiser)

Module Make_Graph_Sig.Optimiser

Core functions
val estimate_complexity : 'a Owl_graph.node array -> int * int

TODO

val optimise_nodes : Operator.Symbol.Shape.Type.attr Owl_graph.node array -> unit

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/index.html b/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/index.html deleted file mode 100644 index f4c44b09b..000000000 --- a/owl-base/Owl_computation_engine_sig/module-type-Make_Graph_Sig/index.html +++ /dev/null @@ -1,33 +0,0 @@ - -Make_Graph_Sig (owl-base.Owl_computation_engine_sig.Make_Graph_Sig)

Module type Owl_computation_engine_sig.Make_Graph_Sig

include Owl_computation_graph_sig.Sig
Type definition
type graph

TODO

Core functions
val shape_or_value : Optimiser.Operator.Symbol.Shape.Type.t -> string

TODO

val graph_to_dot : graph -> string

TODO

val graph_to_trace : graph -> string

TODO

val save_graph : 'a -> string -> unit

TODO

val load_graph : string -> 'a * 'b

TODO

val invalidate_rvs : graph -> unit

TODO

val is_iopair_safe : 'a Owl_graph.node -> 'a Owl_graph.node -> bool

TODO

val update_iopair : graph -> unit

TODO

val remove_unused_iopair : 'a Owl_graph.node array -> 'b array -> 'a Owl_graph.node array * 'b array

TODO

val optimise : graph -> unit

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_graph/.dummy b/owl-base/Owl_computation_graph/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Linalg/index.html b/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Linalg/index.html deleted file mode 100644 index f8677c481..000000000 --- a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Linalg/index.html +++ /dev/null @@ -1,33 +0,0 @@ - -Linalg (owl-base.Owl_computation_graph.Make.Optimiser.Operator.Linalg)

Module Operator.Linalg

\ No newline at end of file diff --git a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Mat/index.html b/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Mat/index.html deleted file mode 100644 index 10ae156bb..000000000 --- a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_graph.Make.Optimiser.Operator.Mat)

Module Operator.Mat

val eye : int -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Scalar/index.html b/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Scalar/index.html deleted file mode 100644 index b119f1a6b..000000000 --- a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Scalar/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -Scalar (owl-base.Owl_computation_graph.Make.Optimiser.Operator.Scalar)

Module Operator.Scalar

\ No newline at end of file diff --git a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html deleted file mode 100644 index 52f74544c..000000000 --- a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_computation_graph.Make.Optimiser.Operator.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html deleted file mode 100644 index 214c33683..000000000 --- a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_graph.Make.Optimiser.Operator.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html deleted file mode 100644 index 7028787e6..000000000 --- a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_computation_graph.Make.Optimiser.Operator.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html b/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html deleted file mode 100644 index e2441ec6d..000000000 --- a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_computation_graph.Make.Optimiser.Operator.Symbol.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file diff --git a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/index.html b/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/index.html deleted file mode 100644 index 9be7835d9..000000000 --- a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/Device/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Device (owl-base.Owl_computation_graph.Make.Optimiser.Operator.Symbol.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/index.html b/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/index.html deleted file mode 100644 index d336616eb..000000000 --- a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/Type/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Type (owl-base.Owl_computation_graph.Make.Optimiser.Operator.Symbol.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

block type keeps a reference to a block of memory and to the nodes sharing that block.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
\ No newline at end of file diff --git a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/index.html b/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/index.html deleted file mode 100644 index 650fda872..000000000 --- a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/Shape/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Shape (owl-base.Owl_computation_graph.Make.Optimiser.Operator.Symbol.Shape)

Module Symbol.Shape

Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/index.html b/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/index.html deleted file mode 100644 index 0c937ac2b..000000000 --- a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/Symbol/index.html +++ /dev/null @@ -1,28 +0,0 @@ - -Symbol (owl-base.Owl_computation_graph.Make.Optimiser.Operator.Symbol)

Module Operator.Symbol

Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.

val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.

val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit

Links a node to a reusable block and initialises the node's memory on the block's memory.

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Return the node that is currently using the memory of the block.

val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit

Update the node that is currently using the block of memory.

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.
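
The allocation helpers above compose into a small lifecycle. The sketch below is illustrative only; it assumes it runs where this Symbol module is accessible and that the caller marks the active node explicitly, and it is not the library's own allocation strategy.

(* Allocate a fresh reusable block of [size] elements and attach node [x] to it. *)
let attach_fresh_block x size =
  let id  = Symbol.new_block_id () in                      (* an unused block id *)
  let blk = Symbol.make_empty_block ~block_id:id size in   (* empty block of memory *)
  Symbol.add_node_to_block x blk;                          (* x now computes into blk *)
  Symbol.set_active_node blk x;                            (* x currently owns the memory *)
  assert (Symbol.get_block_id x = id)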

val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.
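
A short usage sketch of the two checks above (the helpers are hypothetical and assume this Symbol module is in scope):

(* Return the block id only when memory has been assigned; [check_assigned]
   is the raising variant for callers that require an allocated block. *)
let block_id_opt node =
  if Symbol.is_assigned node then Some (Symbol.get_block_id node) else None

let block_id_exn node =
  Symbol.check_assigned node;
  Symbol.get_block_id node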

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/index.html b/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/index.html deleted file mode 100644 index 548051b7b..000000000 --- a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/Operator/index.html +++ /dev/null @@ -1,420 +0,0 @@ - -Operator (owl-base.Owl_computation_graph.Make.Optimiser.Operator)

Module Optimiser.Operator

Vectorised functions
val empty : int array -> Symbol.Shape.Type.arr

TODO

val zeros : int array -> Symbol.Shape.Type.arr

TODO

val ones : int array -> Symbol.Shape.Type.arr

TODO

val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val sequential : ?a:Symbol.Shape.Type.elt -> ?step:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val uniform : ?a:Symbol.Shape.Type.elt -> ?b:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val gaussian : ?mu:Symbol.Shape.Type.elt -> ?sigma:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val init_nd : int array -> (int array -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val shape : Symbol.Shape.Type.arr -> int array

TODO

val numel : Symbol.Shape.Type.arr -> int

TODO

TODO

val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit

TODO

val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val set_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

TODO

val set_fancy : Owl_types.index list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

val copy_ : out:'a -> 'b -> 'c

TODO

val reset : Symbol.Shape.Type.arr -> unit

TODO

val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val pad : ?v:Symbol.Shape.Type.elt -> int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr

TODO

val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val concatenate : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

TODO

val split : ?axis:int -> 'a -> 'b -> 'c

TODO

val draw : ?axis:int -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr * 'a array

TODO

TODO

delay f x returns f x. It allows to use a function that is not tracked by the computation graph and delay its evaluation. The output must have the same shape as the input.

delay_array out_shape f x works in the same way as delay but is applied on an array of ndarrays. Needs the shape of the output as an argument.
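
The following is a minimal sketch of how the two functions above could be used, assuming they are called inside this Operator module and assuming the argument types suggested by the Delay and DelayArray constructors of the op type (an untracked function of type Device.A.arr -> Device.A.arr, respectively Device.A.arr array -> Device.A.arr); the helper names are hypothetical.

module A = Symbol.Shape.Type.Device.A

(* Apply an untracked element-wise function; the result keeps the input shape. *)
let squashed x = delay A.sigmoid x

(* Combine two lazy arrays with an untracked function; the output shape
   [|3; 3|] must be supplied because the graph cannot infer it. *)
let summed x y = delay_array [| 3; 3 |] (fun a -> A.add a.(0) a.(1)) [| x; y |]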

val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

lazy_print x prints the output of x when it is evaluated. Is implemented as an identity node. For information about the optional parameters, refer to the print function of the Ndarray module.
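
For instance (a sketch only; sum is the graph-level reduction listed further down this page):

(* [x'] behaves exactly like [x] in later operations; the printing happens
   only when an engine evaluates the graph, not when it is built. *)
let traced_sum x =
  let x' = lazy_print ~max_row:10 ~header:true x in
  sum ~axis:0 x'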

val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit

TODO

val min : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum_reduce : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

TODO

val elt_less_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val elt_greater_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val upsampling2d_backward : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val row_num : Symbol.Shape.Type.arr -> int

TODO

val col_num : Symbol.Shape.Type.arr -> int

TODO

val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

TODO

val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val to_rows : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val to_cols : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr

TODO

val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr

TODO

val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array

TODO

Scalar functions
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/index.html b/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/index.html deleted file mode 100644 index b6a59250b..000000000 --- a/owl-base/Owl_computation_graph/Make/argument-1-Optimiser/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Optimiser (owl-base.Owl_computation_graph.Make.Optimiser)

Parameter Make.Optimiser

Core functions
val estimate_complexity : 'a Owl_graph.node array -> int * int

TODO

val optimise_nodes : Operator.Symbol.Shape.Type.attr Owl_graph.node array -> unit

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_graph/Make/index.html b/owl-base/Owl_computation_graph/Make/index.html deleted file mode 100644 index ccf703256..000000000 --- a/owl-base/Owl_computation_graph/Make/index.html +++ /dev/null @@ -1,37 +0,0 @@ - -Make (owl-base.Owl_computation_graph.Make)

Module Owl_computation_graph.Make

Parameters

Signature

module Optimiser = Optimiser
val shape_or_value : Optimiser.Operator.Symbol.Shape.Type.t -> string
val _block_colour : int -> string
val graph_to_dot : graph -> string
val graph_to_trace : graph -> string
val save_graph : 'a -> string -> unit
val load_graph : string -> 'a * 'b
val invalidate_rvs : graph -> unit
val is_iopair_safe : 'a Owl_graph.node -> 'b Owl_graph.node -> bool
val update_iopair : graph -> unit
val remove_unused_iopair : 'a Owl_graph.node array -> 'b array -> 'a Owl_graph.node array * 'b array
val optimise : graph -> unit
\ No newline at end of file diff --git a/owl-base/Owl_computation_graph_sig/.dummy b/owl-base/Owl_computation_graph_sig/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Linalg/index.html b/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Linalg/index.html deleted file mode 100644 index 1b992101e..000000000 --- a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Linalg/index.html +++ /dev/null @@ -1,33 +0,0 @@ - -Linalg (owl-base.Owl_computation_graph_sig.Sig.Optimiser.Operator.Linalg)

Module Operator.Linalg

\ No newline at end of file diff --git a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Mat/index.html b/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Mat/index.html deleted file mode 100644 index f9ed8ce2c..000000000 --- a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_graph_sig.Sig.Optimiser.Operator.Mat)

Module Operator.Mat

val eye : int -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Scalar/index.html b/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Scalar/index.html deleted file mode 100644 index bc3402ff3..000000000 --- a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Scalar/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -Scalar (owl-base.Owl_computation_graph_sig.Sig.Optimiser.Operator.Scalar)

Module Operator.Scalar

\ No newline at end of file diff --git a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html deleted file mode 100644 index 418a46ce5..000000000 --- a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_computation_graph_sig.Sig.Optimiser.Operator.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html deleted file mode 100644 index 2def90b15..000000000 --- a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_graph_sig.Sig.Optimiser.Operator.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html deleted file mode 100644 index 2f1d83ac2..000000000 --- a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_computation_graph_sig.Sig.Optimiser.Operator.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html b/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html deleted file mode 100644 index 118949be5..000000000 --- a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_computation_graph_sig.Sig.Optimiser.Operator.Symbol.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file diff --git a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/index.html b/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/index.html deleted file mode 100644 index 2148ae157..000000000 --- a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/Device/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Device (owl-base.Owl_computation_graph_sig.Sig.Optimiser.Operator.Symbol.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/index.html b/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/index.html deleted file mode 100644 index 0fd866ffe..000000000 --- a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/Type/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Type (owl-base.Owl_computation_graph_sig.Sig.Optimiser.Operator.Symbol.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

The block type keeps a reference to a block of memory and to the nodes sharing that block.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
\ No newline at end of file diff --git a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/index.html b/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/index.html deleted file mode 100644 index 1f752137f..000000000 --- a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/Shape/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Shape (owl-base.Owl_computation_graph_sig.Sig.Optimiser.Operator.Symbol.Shape)

Module Symbol.Shape

Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/index.html b/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/index.html deleted file mode 100644 index 1268d42f4..000000000 --- a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/Symbol/index.html +++ /dev/null @@ -1,28 +0,0 @@ - -Symbol (owl-base.Owl_computation_graph_sig.Sig.Optimiser.Operator.Symbol)

Module Operator.Symbol

Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.

val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.

val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit

Links a node to a reusable block and initialises the node's memory within the block's memory.

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Return the node that is currently using the memory of the block.

val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit

Update the node that is currently using the block of memory.

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.
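To make the memory-block bookkeeping above concrete, here is a self-contained OCaml sketch written for this page (it is not Owl's implementation; the simplified node parameter and the exact function shapes are assumptions):

type 'node block = {
  size : int;                     (* number of elements the block can hold *)
  block_id : int;                 (* unique identifier of the block *)
  mutable active : 'node option;  (* node currently using the memory, if any *)
  mutable nodes : 'node list;     (* all nodes sharing this block *)
}

let new_block_id =
  let counter = ref (-1) in
  fun () -> incr counter; !counter

let make_empty_block ?block_id size =
  let block_id = match block_id with Some id -> id | None -> new_block_id () in
  { size; block_id; active = None; nodes = [] }

let add_node_to_block node block = block.nodes <- node :: block.nodes
let get_active_node block = block.active
let set_active_node block node = block.active <- Some node

(* -1 mirrors the documented behaviour of get_block_id for unassigned nodes. *)
let get_block_id assigned = match assigned with Some b -> b.block_id | None -> -1

In the real module these functions operate on Shape.Type.attr Owl_graph.node values and Device.value memory; the sketch only keeps the shape of the bookkeeping.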

val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/index.html b/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/index.html deleted file mode 100644 index 6765b8b0a..000000000 --- a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/Operator/index.html +++ /dev/null @@ -1,420 +0,0 @@ - -Operator (owl-base.Owl_computation_graph_sig.Sig.Optimiser.Operator)

Module Optimiser.Operator

Vectorised functions
val empty : int array -> Symbol.Shape.Type.arr

TODO

val zeros : int array -> Symbol.Shape.Type.arr

TODO

val ones : int array -> Symbol.Shape.Type.arr

TODO

val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val sequential : ?a:Symbol.Shape.Type.elt -> ?step:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val uniform : ?a:Symbol.Shape.Type.elt -> ?b:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val gaussian : ?mu:Symbol.Shape.Type.elt -> ?sigma:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val init_nd : int array -> (int array -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val shape : Symbol.Shape.Type.arr -> int array

TODO

val numel : Symbol.Shape.Type.arr -> int

TODO

TODO

val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit

TODO

val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val set_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

TODO

val set_fancy : Owl_types.index list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

val copy_ : out:'a -> 'b -> 'c

TODO

val reset : Symbol.Shape.Type.arr -> unit

TODO

val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val pad : ?v:Symbol.Shape.Type.elt -> int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr

TODO

val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val concatenate : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

TODO

val split : ?axis:int -> 'a -> 'b -> 'c

TODO

val draw : ?axis:int -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr * 'a array

TODO

TODO

delay f x returns f x. It makes it possible to apply a function that is not tracked by the computation graph and to delay its evaluation. The output must have the same shape as the input.

delay_array out_shape f x works in the same way as delay but is applied to an array of ndarrays. The shape of the output must be given as an argument.
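The following self-contained sketch (written for this page, not Owl's code) shows the idea behind delay: the untracked function is captured in a thunk and applied only when the node is evaluated.

type 'a node =
  | Const of 'a
  | Delayed of (unit -> 'a)

let eval = function
  | Const v -> v
  | Delayed thunk -> thunk ()

(* delay f x wraps the untracked function f; it runs only when the
   resulting node is evaluated, and must preserve the shape of x. *)
let delay f x = Delayed (fun () -> f (eval x))

let _example =
  let x = Const [| 1.; 2.; 3. |] in
  let y = delay (Array.map (fun v -> v *. 2.)) x in
  eval y  (* [| 2.; 4.; 6. |] *)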

val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

lazy_print x prints the output of x when it is evaluated. It is implemented as an identity node. For information about the optional parameters, refer to the print function of the Ndarray module.
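In the same spirit, a hypothetical sketch of the identity-node behaviour of lazy_print, reusing the node type and eval from the sketch above (an illustration, not the library's code):

(* Printing happens as a side effect of evaluation; the value itself is
   passed through unchanged, like an identity node. *)
let lazy_print ?(fmt = string_of_float) x =
  Delayed (fun () ->
    let v = eval x in
    Array.iter (fun e -> print_endline (fmt e)) v;
    v)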

val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit

TODO

val min : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum_reduce : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

TODO

val elt_less_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val elt_greater_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val upsampling2d_backward : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val row_num : Symbol.Shape.Type.arr -> int

TODO

val col_num : Symbol.Shape.Type.arr -> int

TODO

val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

TODO

val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val to_rows : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val to_cols : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr

TODO

val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr

TODO

val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array

TODO

Scalar functions
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/index.html b/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/index.html deleted file mode 100644 index 45da71231..000000000 --- a/owl-base/Owl_computation_graph_sig/module-type-Sig/Optimiser/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Optimiser (owl-base.Owl_computation_graph_sig.Sig.Optimiser)

Module Sig.Optimiser

Core functions
val estimate_complexity : 'a Owl_graph.node array -> int * int

TODO

val optimise_nodes : Operator.Symbol.Shape.Type.attr Owl_graph.node array -> unit

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_graph_sig/module-type-Sig/index.html b/owl-base/Owl_computation_graph_sig/module-type-Sig/index.html deleted file mode 100644 index cd78019cd..000000000 --- a/owl-base/Owl_computation_graph_sig/module-type-Sig/index.html +++ /dev/null @@ -1,33 +0,0 @@ - -Sig (owl-base.Owl_computation_graph_sig.Sig)

Module type Owl_computation_graph_sig.Sig

Type definition
type graph

TODO

Core functions
val shape_or_value : Optimiser.Operator.Symbol.Shape.Type.t -> string

TODO

val graph_to_dot : graph -> string

TODO

val graph_to_trace : graph -> string

TODO

val save_graph : 'a -> string -> unit

TODO

val load_graph : string -> 'a * 'b

TODO

val invalidate_rvs : graph -> unit

TODO

val is_iopair_safe : 'a Owl_graph.node -> 'a Owl_graph.node -> bool

TODO

val update_iopair : graph -> unit

TODO

val remove_unused_iopair : 'a Owl_graph.node array -> 'b array -> 'a Owl_graph.node array * 'b array

TODO

val optimise : graph -> unit

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_operator/.dummy b/owl-base/Owl_computation_operator/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_computation_operator/Make/Linalg/index.html b/owl-base/Owl_computation_operator/Make/Linalg/index.html deleted file mode 100644 index 1f2c80830..000000000 --- a/owl-base/Owl_computation_operator/Make/Linalg/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Linalg (owl-base.Owl_computation_operator.Make.Linalg)

Module Make.Linalg

val logdet : 'a -> 'b
val chol : ?upper:bool -> 'a -> 'b
val svd : ?thin:bool -> 'a -> 'b
val qr : 'a -> 'b
val lq : 'a -> 'b
val sylvester : 'a -> 'b -> 'c -> 'd
val lyapunov : 'a -> 'b -> 'c
val discrete_lyapunov : ?solver:[> `default ] -> 'a -> 'b -> 'c
val linsolve : ?trans:'a -> ?typ:[> `n ] -> 'b -> 'c -> 'd
val care : ?diag_r:bool -> 'a -> 'b -> 'c -> 'd -> 'e
val dare : ?diag_r:bool -> 'a -> 'b -> 'c -> 'd -> 'e
\ No newline at end of file diff --git a/owl-base/Owl_computation_operator/Make/Mat/index.html b/owl-base/Owl_computation_operator/Make/Mat/index.html deleted file mode 100644 index 282126d2c..000000000 --- a/owl-base/Owl_computation_operator/Make/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_operator.Make.Mat)

Module Make.Mat

val eye : 'a -> 'b
val diagm : ?k:'a -> 'b -> 'c
val tril : ?k:'a -> 'b -> 'c
val triu : ?k:'a -> 'b -> 'c
\ No newline at end of file diff --git a/owl-base/Owl_computation_operator/Make/Scalar/index.html b/owl-base/Owl_computation_operator/Make/Scalar/index.html deleted file mode 100644 index 26dc08394..000000000 --- a/owl-base/Owl_computation_operator/Make/Scalar/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -Scalar (owl-base.Owl_computation_operator.Make.Scalar)

Module Make.Scalar

\ No newline at end of file diff --git a/owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/A/Linalg/index.html deleted file mode 100644 index 5ea60b3b9..000000000 --- a/owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_computation_operator.Make.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/A/Mat/index.html deleted file mode 100644 index 39018f7a9..000000000 --- a/owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_operator.Make.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/A/Scalar/index.html deleted file mode 100644 index e66c4f3d2..000000000 --- a/owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_computation_operator.Make.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/A/index.html b/owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/A/index.html deleted file mode 100644 index 572512bd3..000000000 --- a/owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_computation_operator.Make.Symbol.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
diff --git a/owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/index.html b/owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/Device/index.html
deleted file mode 100644

Device (owl-base.Owl_computation_operator.Make.Symbol.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

diff --git a/owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/index.html b/owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/Type/index.html
deleted file mode 100644

Type (owl-base.Owl_computation_operator.Make.Symbol.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

The block type keeps a reference to a block of memory and to the nodes sharing that block; a short usage sketch follows the full type definition below.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
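
As a rough illustration of how these records fit together (a sketch added here, not part of the generated documentation), the code below assumes a block value b and an attr value a are already in scope and only reads their fields:

(* count how many nodes currently share the memory of block [b] *)
let sharing_degree (b : block) = List.length b.nodes

(* check whether attribute record [a] already has memory blocks assigned *)
let has_block (a : attr) =
  match a.block with
  | Some _ -> true
  | None -> false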
diff --git a/owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/index.html b/owl-base/Owl_computation_operator/Make/argument-1-Symbol/Shape/index.html
deleted file mode 100644

Shape (owl-base.Owl_computation_operator.Make.Symbol.Shape)

Module Symbol.Shape

Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO

diff --git a/owl-base/Owl_computation_operator/Make/argument-1-Symbol/index.html b/owl-base/Owl_computation_operator/Make/argument-1-Symbol/index.html
deleted file mode 100644

Symbol (owl-base.Owl_computation_operator.Make.Symbol)

Parameter Make.Symbol

Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.

val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.

val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit

Links a node to a reusable block and initialises the node's memory on the memory of the block.

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Return the node that is currently using the memory of the block.

val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit

Update the node that is currently using the block of memory.

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.
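
Taken together, these functions suggest the following allocation pattern. This is only a hedged sketch: the nodes x and y and the block size 1024 are assumptions for illustration, not taken from the library's documentation.

(* share one freshly allocated memory block between two existing nodes *)
let share_memory x y =
  let id = new_block_id () in                      (* reserve an unused block id *)
  let blk = make_empty_block ~block_id:id 1024 in  (* empty block of size 1024 *)
  add_node_to_block x blk;                         (* x now computes into blk *)
  add_node_to_block y blk;                         (* y reuses the same memory *)
  assert (get_block_id x = id);                    (* the node should record [id] *)
  get_active_node blk                              (* which node owns the memory now, if any *)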

val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.
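
For instance, a one-line helper built on this function (a sketch; node is assumed to be an existing graph node):

(* how many nodes the library reports as sharing [node]'s memory block *)
let sharing_count node = Array.length (get_shared_nodes node)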

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.
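
A small sketch combining the two checks; the fallback block size 256 is an assumption for illustration, not taken from the documentation.

(* make sure [node] has memory before it is used *)
let ensure_assigned node =
  if not (is_assigned node) then
    add_node_to_block node (make_empty_block 256);  (* allocate a block if missing *)
  check_assigned node                               (* raises if still unassigned *)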

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO

diff --git a/owl-base/Owl_computation_operator/Make/index.html b/owl-base/Owl_computation_operator/Make/index.html
deleted file mode 100644

Make (owl-base.Owl_computation_operator.Make)

Module Owl_computation_operator.Make

Parameters

Signature

module Symbol = Symbol
val empty : int array -> Symbol.Shape.Type.arr
val zeros : int array -> Symbol.Shape.Type.arr
val ones : int array -> Symbol.Shape.Type.arr
val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val sequential : ?a:Symbol.Shape.Type.elt -> ?step:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val uniform : ?a:Symbol.Shape.Type.elt -> ?b:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val gaussian : ?mu:Symbol.Shape.Type.elt -> ?sigma:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr
val init_nd : 'a -> 'b -> 'c
val shape : Symbol.Shape.Type.arr -> int array
val numel : Symbol.Shape.Type.arr -> int
val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit
val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val set_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit
val set_fancy : Owl_types_common.index list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit
val copy_ : out:'a -> 'b -> 'c
val reset : Symbol.Shape.Type.arr -> unit
val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val pad : ?v:Symbol.Shape.Type.elt -> int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val expand : ?hi:bool -> 'a -> 'b -> 'c
val squeeze : ?axis:'a array -> 'b -> 'c
val concatenate : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr
val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr
val concat : axis:'a -> 'b
val split : ?axis:int -> 'a -> 'b -> 'c
val draw : ?axis:int -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr * 'a array
val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit
val min : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val sum : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val sum_reduce : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val elt_less_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val elt_greater_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val dilated_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val dilated_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val dilated_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max_pool1d_backward : Owl_types_common.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max_pool2d_backward : Owl_types_common.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max_pool3d_backward : Owl_types_common.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val avg_pool1d_backward : Owl_types_common.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val avg_pool2d_backward : Owl_types_common.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val avg_pool3d_backward : Owl_types_common.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val upsampling2d_backward : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val row_num : Symbol.Shape.Type.arr -> int
val col_num : Symbol.Shape.Type.arr -> int
val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit
val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit
val diag : ?k:'a -> 'b -> 'c
val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val to_rows : Symbol.Shape.Type.arr -> 'a array
val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr
val of_cols : 'a -> 'b
val to_cols : 'a -> 'b
val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr
val to_arrays : 'a -> 'b

Scalar maths

module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
diff --git a/owl-base/Owl_computation_operator_sig/.dummy b/owl-base/Owl_computation_operator_sig/.dummy
deleted file mode 100644
diff --git a/owl-base/Owl_computation_operator_sig/module-type-Sig/Linalg/index.html b/owl-base/Owl_computation_operator_sig/module-type-Sig/Linalg/index.html
deleted file mode 100644

Linalg (owl-base.Owl_computation_operator_sig.Sig.Linalg)

Module Sig.Linalg

diff --git a/owl-base/Owl_computation_operator_sig/module-type-Sig/Mat/index.html b/owl-base/Owl_computation_operator_sig/module-type-Sig/Mat/index.html
deleted file mode 100644

Mat (owl-base.Owl_computation_operator_sig.Sig.Mat)

Module Sig.Mat

val eye : int -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

diff --git a/owl-base/Owl_computation_operator_sig/module-type-Sig/Scalar/index.html b/owl-base/Owl_computation_operator_sig/module-type-Sig/Scalar/index.html
deleted file mode 100644

Scalar (owl-base.Owl_computation_operator_sig.Sig.Scalar)

Module Sig.Scalar

diff --git a/owl-base/Owl_computation_operator_sig/module-type-Sig/Symbol/Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_operator_sig/module-type-Sig/Symbol/Shape/Type/Device/A/Linalg/index.html
deleted file mode 100644

Linalg (owl-base.Owl_computation_operator_sig.Sig.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
diff --git a/owl-base/Owl_computation_operator_sig/module-type-Sig/Symbol/Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_operator_sig/module-type-Sig/Symbol/Shape/Type/Device/A/Mat/index.html
deleted file mode 100644

Mat (owl-base.Owl_computation_operator_sig.Sig.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
diff --git a/owl-base/Owl_computation_operator_sig/module-type-Sig/Symbol/Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_operator_sig/module-type-Sig/Symbol/Shape/Type/Device/A/Scalar/index.html
deleted file mode 100644

Scalar (owl-base.Owl_computation_operator_sig.Sig.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
diff --git a/owl-base/Owl_computation_operator_sig/module-type-Sig/Symbol/Shape/Type/Device/A/index.html b/owl-base/Owl_computation_operator_sig/module-type-Sig/Symbol/Shape/Type/Device/A/index.html
deleted file mode 100644

A (owl-base.Owl_computation_operator_sig.Sig.Symbol.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
Device (owl-base.Owl_computation_operator_sig.Sig.Symbol.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

Type (owl-base.Owl_computation_operator_sig.Sig.Symbol.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

block type keeps a reference to a block of memory and to the nodes sharing that block.
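As a hedged illustration of how these fields fit together (assuming the record is in scope, for instance through an instantiated computation engine exposing Shape.Type), a block can be summarised from the documented fields alone:

    (* Summarise a memory block using only the fields documented above. *)
    let describe_block (b : block) =
      Printf.sprintf "block #%d of size %d, shared by %d node(s), %s"
        b.block_id b.size
        (List.length b.nodes)
        (match b.active with
         | Some _ -> "with an active node"
         | None   -> "with no active node")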

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * int -> elt
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of elt -> elt
  31. | Fold of int * elt -> elt -> elt
  32. | Scan of int * elt -> elt -> elt
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of Device.A.arr -> Device.A.arr
  36. | DelayArray of int array * Device.A.arr array -> Device.A.arr
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
Shape (owl-base.Owl_computation_operator_sig.Sig.Symbol.Shape)

Module Symbol.Shape

Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO

Symbol (owl-base.Owl_computation_operator_sig.Sig.Symbol)

Module Sig.Symbol

Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.

val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.

val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit

Links a node to a reusable block and initialises the node's memory on the memory of that block.
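A hedged sketch of how the block-allocation functions above fit together; it is written as a functor over the enclosing signature, so only declarations documented on this page are assumed:

    module Block_demo (S : Owl_computation_operator_sig.Sig) = struct
      open S.Symbol

      (* Allocate a fresh block of [size] elements and let two nodes share it. *)
      let share_block size n1 n2 =
        let blk = make_empty_block ~block_id:(new_block_id ()) size in
        add_node_to_block n1 blk;
        add_node_to_block n2 blk;
        (* Both nodes now report the same block id and count as assigned. *)
        assert (get_block_id n1 = get_block_id n2);
        assert (is_assigned n1 && is_assigned n2);
        blk
    end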

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Return the node that is currently using the memory of the block.

val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit

Update the node that is currently using the block of memory.

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.

val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO

Sig (owl-base.Owl_computation_operator_sig.Sig)

Module type Owl_computation_operator_sig.Sig

Vectorised functions
val empty : int array -> Symbol.Shape.Type.arr

TODO

val zeros : int array -> Symbol.Shape.Type.arr

TODO

val ones : int array -> Symbol.Shape.Type.arr

TODO

val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val sequential : ?a:Symbol.Shape.Type.elt -> ?step:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val uniform : ?a:Symbol.Shape.Type.elt -> ?b:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val gaussian : ?mu:Symbol.Shape.Type.elt -> ?sigma:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val init_nd : int array -> (int array -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val shape : Symbol.Shape.Type.arr -> int array

TODO

val numel : Symbol.Shape.Type.arr -> int

TODO

TODO

val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit

TODO

val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val set_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

TODO

val set_fancy : Owl_types.index list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

val copy_ : out:'a -> 'b -> 'c

TODO

val reset : Symbol.Shape.Type.arr -> unit

TODO

val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val pad : ?v:Symbol.Shape.Type.elt -> int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr

TODO

val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val concatenate : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

TODO

val split : ?axis:int -> 'a -> 'b -> 'c

TODO

val draw : ?axis:int -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr * 'a array

TODO

TODO

delay f x returns f x. It makes it possible to use a function that is not tracked by the computation graph and to delay its evaluation. The output must have the same shape as the input.

delay_array out_shape f x works in the same way as delay but is applied to an array of ndarrays. It needs the shape of the output as an argument.
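A hedged sketch of delay in use; its exact val declaration is not shown in this extract, so the conventional form (an untracked Device.A.arr -> Device.A.arr function applied to a graph array) is assumed:

    module Delay_demo (S : Owl_computation_operator_sig.Sig) = struct
      module A = S.Symbol.Shape.Type.Device.A

      (* Square every element with a raw, untracked ndarray function; the
         output keeps the shape of the input, as [delay] requires. *)
      let square_untracked x = S.delay A.sqr x
    end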

val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

lazy_print x prints the output of x when it is evaluated. It is implemented as an identity node. For information about the optional parameters, refer to the print function of the Ndarray module.
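Because lazy_print is an identity node, it can be spliced into a pipeline without affecting the result, as in this hedged sketch built only from functions declared on this page:

    module Print_demo (S : Owl_computation_operator_sig.Sig) = struct
      (* Print the intermediate sum when the graph is evaluated, then keep
         computing with the same (unchanged) value. *)
      let traced x =
        let y = S.sum ~axis:0 x in
        let y' = S.lazy_print ~header:true y in
        S.max ~keep_dims:false y'
    end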

val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit

TODO

val min : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum_reduce : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

TODO

val elt_less_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val elt_greater_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val upsampling2d_backward : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val row_num : Symbol.Shape.Type.arr -> int

TODO

val col_num : Symbol.Shape.Type.arr -> int

TODO

val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

TODO

val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val to_rows : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val to_cols : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr

TODO

val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr

TODO

val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array

TODO

Scalar functions
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
Linalg (owl-base.Owl_computation_optimiser.Make.Operator.Linalg)

Module Operator.Linalg

Mat (owl-base.Owl_computation_optimiser.Make.Operator.Mat)

Module Operator.Mat

val eye : int -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

Scalar (owl-base.Owl_computation_optimiser.Make.Operator.Scalar)

Module Operator.Scalar

Linalg (owl-base.Owl_computation_optimiser.Make.Operator.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
Mat (owl-base.Owl_computation_optimiser.Make.Operator.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
Scalar (owl-base.Owl_computation_optimiser.Make.Operator.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
A (owl-base.Owl_computation_optimiser.Make.Operator.Symbol.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
Device (owl-base.Owl_computation_optimiser.Make.Operator.Symbol.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

Type (owl-base.Owl_computation_optimiser.Make.Operator.Symbol.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

block type keeps a reference to a block of memory and to the nodes sharing that block.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * int -> elt
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of elt -> elt
  31. | Fold of int * elt -> elt -> elt
  32. | Scan of int * elt -> elt -> elt
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of Device.A.arr -> Device.A.arr
  36. | DelayArray of int array * Device.A.arr array -> Device.A.arr
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
\ No newline at end of file diff --git a/owl-base/Owl_computation_optimiser/Make/argument-1-Operator/Symbol/Shape/index.html b/owl-base/Owl_computation_optimiser/Make/argument-1-Operator/Symbol/Shape/index.html deleted file mode 100644 index 50091dd4f..000000000 --- a/owl-base/Owl_computation_optimiser/Make/argument-1-Operator/Symbol/Shape/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Shape (owl-base.Owl_computation_optimiser.Make.Operator.Symbol.Shape)

Module Symbol.Shape

Core functions
val infer_shape : - Type.op -> - Type.attr Owl_graph.node array -> - int array option array

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_optimiser/Make/argument-1-Operator/Symbol/index.html b/owl-base/Owl_computation_optimiser/Make/argument-1-Operator/Symbol/index.html deleted file mode 100644 index 0c0572955..000000000 --- a/owl-base/Owl_computation_optimiser/Make/argument-1-Operator/Symbol/index.html +++ /dev/null @@ -1,28 +0,0 @@ - -Symbol (owl-base.Owl_computation_optimiser.Make.Operator.Symbol)

Module Operator.Symbol

Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : - ?name:string -> - ?value:Shape.Type.Device.value array -> - ?shape:int array option array -> - ?freeze:bool -> - ?reuse:bool -> - ?state:Shape.Type.state -> - Shape.Type.op -> - Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : - ?shape:int array option array -> - Shape.Type.op -> - Shape.Type.attr Owl_graph.node array -> - Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.

val make_value_block : - Shape.Type.Device.value -> - Shape.Type.attr Owl_graph.node -> - unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.

val add_node_to_block : - Shape.Type.attr Owl_graph.node -> - Shape.Type.block -> - unit

Links a node to a reusable block and initialises the node's memory within the memory of the block.

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Return the node that is currently using the memory of the block.

val set_active_node : - Shape.Type.block -> - Shape.Type.attr Owl_graph.node -> - unit

Update the node that is currently using the block of memory.

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.
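
As a concrete illustration, here is a minimal sketch (not part of the library's documentation) of how these block functions fit together. It assumes this Symbol module is open; the Zeros constructor and the block_id record field are taken from Shape.Type as defined elsewhere in these docs.

let () =
  let n = make_node (Shape.Type.Zeros [| 10; 10 |]) in
  (* allocate a block of 100 elements with a fresh id and link the node to it *)
  let blk = make_empty_block ~block_id:(new_block_id ()) 100 in
  add_node_to_block n blk;
  assert (get_block_id n = blk.Shape.Type.block_id);
  (* inspect which node, if any, currently owns the block's memory *)
  match get_active_node blk with
  | Some active -> Printf.printf "active node refnum = %d\n" (refnum active)
  | None -> print_endline "no active node"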

val set_value : - Shape.Type.attr Owl_graph.node -> - Shape.Type.Device.value array -> - unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : - Shape.Type.attr Owl_graph.node -> - Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.
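
Another hypothetical sketch under the same assumptions: once two nodes have been added to the same block, they show up as shared.

let () =
  let n1 = make_node (Shape.Type.Zeros [| 5 |]) in
  let n2 = make_node (Shape.Type.Ones [| 5 |]) in
  let blk = make_empty_block 5 in
  add_node_to_block n1 blk;
  add_node_to_block n2 blk;
  assert (is_shared n1 && is_shared n2);
  (* the shared set seen from n1 includes n2 (and possibly n1 itself) *)
  Printf.printf "nodes on this block: %d\n" (Array.length (get_shared_nodes n1))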

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.
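
For instance (a hypothetical sketch), the two checks differ only in how a missing assignment is reported:

let describe n =
  (* is_assigned merely reports the status ... *)
  if is_assigned n then Printf.printf "assigned to block %d\n" (get_block_id n);
  (* ... whereas check_assigned raises an exception when no block is assigned *)
  check_assigned n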

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_optimiser/Make/argument-1-Operator/index.html b/owl-base/Owl_computation_optimiser/Make/argument-1-Operator/index.html deleted file mode 100644 index 4aa24105b..000000000 --- a/owl-base/Owl_computation_optimiser/Make/argument-1-Operator/index.html +++ /dev/null @@ -1,420 +0,0 @@ - -Operator (owl-base.Owl_computation_optimiser.Make.Operator)

Parameter Make.Operator

Vectorised functions
val empty : int array -> Symbol.Shape.Type.arr

TODO

val zeros : int array -> Symbol.Shape.Type.arr

TODO

val ones : int array -> Symbol.Shape.Type.arr

TODO

val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val sequential : - ?a:Symbol.Shape.Type.elt -> - ?step:Symbol.Shape.Type.elt -> - int array -> - Symbol.Shape.Type.arr

TODO

val uniform : - ?a:Symbol.Shape.Type.elt -> - ?b:Symbol.Shape.Type.elt -> - int array -> - Symbol.Shape.Type.arr

TODO

val gaussian : - ?mu:Symbol.Shape.Type.elt -> - ?sigma:Symbol.Shape.Type.elt -> - int array -> - Symbol.Shape.Type.arr

TODO

val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val init_nd : - int array -> - (int array -> Symbol.Shape.Type.elt) -> - Symbol.Shape.Type.arr

TODO

val shape : Symbol.Shape.Type.arr -> int array

TODO

val numel : Symbol.Shape.Type.arr -> int

TODO

TODO

val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit

TODO

val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val set_slice : - int list list -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - unit

TODO

TODO

val set_fancy : - Owl_types.index list -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - unit

TODO

val copy_ : out:'a -> 'b -> 'c

TODO

val reset : Symbol.Shape.Type.arr -> unit

TODO

val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val pad : - ?v:Symbol.Shape.Type.elt -> - int list list -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr

TODO

val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val concatenate : - ?axis:int -> - Symbol.Shape.Type.arr array -> - Symbol.Shape.Type.arr

TODO

val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

TODO

val split : ?axis:int -> 'a -> 'b -> 'c

TODO

val draw : - ?axis:int -> - Symbol.Shape.Type.arr -> - int -> - Symbol.Shape.Type.arr * 'a array

TODO

TODO

delay f x returns f x. It makes it possible to apply a function that is not tracked by the computation graph and to delay its evaluation. The output must have the same shape as the input.

delay_array out_shape f x works in the same way as delay but is applied to an array of ndarrays. It requires the shape of the output as an argument.
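
A minimal sketch of both functions, assuming this Operator module is open, that A abbreviates the concrete ndarray module Symbol.Shape.Type.Device.A, and that the signatures (not shown on this page) are delay : (A.arr -> A.arr) -> arr -> arr and delay_array : int array -> (A.arr array -> A.arr) -> arr array -> arr:

module A = Symbol.Shape.Type.Device.A

let x = zeros [| 3; 3 |]

(* A.sqr is not recorded as a graph operation; it runs only when the graph is
   evaluated, and being element-wise it preserves the required 3 x 3 shape *)
let y = delay A.sqr x

(* the function receives concrete ndarrays; the output shape must be given
   explicitly because it cannot be inferred from the lazy inputs *)
let z = delay_array [| 3; 3 |] (fun xs -> A.add xs.(0) xs.(1)) [| x; y |]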

val lazy_print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

lazy_print x prints the output of x when it is evaluated. It is implemented as an identity node. For information about the optional parameters, refer to the print function of the Ndarray module.
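
For example (a sketch under the same assumptions as above), lazy_print can be spliced into a chain of lazy operations; nothing is printed until the graph containing the result is evaluated:

let x = uniform [| 4; 4 |]

(* s carries the same value as sum ~axis:0 x; printing it with a header is
   recorded as a side effect that fires at evaluation time *)
let s = lazy_print ~header:true (sum ~axis:0 x)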

val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit

TODO

val min : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val sum : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val sum_reduce : - ?axis:int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val log_sum_exp : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

TODO

val elt_less_equal_scalar : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.elt -> - Symbol.Shape.Type.arr

TODO

val elt_greater_equal_scalar : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.elt -> - Symbol.Shape.Type.arr

TODO

val conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max_pool1d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max_pool2d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max_pool3d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val avg_pool1d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val avg_pool2d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val avg_pool3d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val upsampling2d_backward : - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val row_num : Symbol.Shape.Type.arr -> int

TODO

val col_num : Symbol.Shape.Type.arr -> int

TODO

val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

TODO

val transpose : - ?axis:int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val to_rows : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val to_cols : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val of_array : - Symbol.Shape.Type.elt array -> - int array -> - Symbol.Shape.Type.arr

TODO

val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr

TODO

val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array

TODO

Scalar functions
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_computation_optimiser/Make/index.html b/owl-base/Owl_computation_optimiser/Make/index.html deleted file mode 100644 index 5d7d3fae6..000000000 --- a/owl-base/Owl_computation_optimiser/Make/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Make (owl-base.Owl_computation_optimiser.Make)

Module Owl_computation_optimiser.Make

Parameters

Signature

module Operator = Operator
val _optimise_term : Operator.Symbol.Shape.Type.attr Owl_graph.node -> unit
val pattern_011 : Operator.Symbol.Shape.Type.op -> float -> float -> float
val pattern_013 : Operator.Symbol.Shape.Type.op -> float -> float
val pattern_021 : 'a -> 'b
val estimate_complexity : 'a Owl_graph.node array -> int * int
val optimise_nodes : - Operator.Symbol.Shape.Type.attr Owl_graph.node array -> - unit
\ No newline at end of file diff --git a/owl-base/Owl_computation_optimiser_sig/.dummy b/owl-base/Owl_computation_optimiser_sig/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Linalg/index.html b/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Linalg/index.html deleted file mode 100644 index 1e6999795..000000000 --- a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Linalg/index.html +++ /dev/null @@ -1,33 +0,0 @@ - -Linalg (owl-base.Owl_computation_optimiser_sig.Sig.Operator.Linalg)

Module Operator.Linalg

\ No newline at end of file diff --git a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Mat/index.html b/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Mat/index.html deleted file mode 100644 index 079e43721..000000000 --- a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_optimiser_sig.Sig.Operator.Mat)

Module Operator.Mat

val eye : int -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Scalar/index.html b/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Scalar/index.html deleted file mode 100644 index ee19378b6..000000000 --- a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Scalar/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -Scalar (owl-base.Owl_computation_optimiser_sig.Sig.Operator.Scalar)

Module Operator.Scalar

\ No newline at end of file diff --git a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html deleted file mode 100644 index 85185fe9e..000000000 --- a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_computation_optimiser_sig.Sig.Operator.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : - ?solver:[ `default | `bilinear | `direct ] -> - arr -> - arr -> - arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/A/Mat/index.html deleted file mode 100644 index 3157250d6..000000000 --- a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_optimiser_sig.Sig.Operator.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html deleted file mode 100644 index 2b668ca1d..000000000 --- a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_computation_optimiser_sig.Sig.Operator.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/A/index.html b/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/A/index.html deleted file mode 100644 index 0db2a9321..000000000 --- a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_computation_optimiser_sig.Sig.Operator.Symbol.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(elt -> string) -> - arr -> - unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val dilated_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val transpose_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val max_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv1d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : - ?transa:bool -> - ?transb:bool -> - ?alpha:elt -> - ?beta:elt -> - c:arr -> - arr -> - arr -> - unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val conv2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val conv3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val dilated_conv1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - unit
val dilated_conv2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - unit
val dilated_conv3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - unit
val transpose_conv1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val transpose_conv2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val transpose_conv3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val max_pool1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val max_pool2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val max_pool3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val avg_pool1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val avg_pool2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val avg_pool3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv1d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv2d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv2d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv3d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv3d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val transpose_conv1d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv1d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv2d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv2d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv3d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv3d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val max_pool1d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val max_pool2d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val max_pool3d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val avg_pool1d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val avg_pool2d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val avg_pool3d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file diff --git a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/index.html b/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/index.html deleted file mode 100644 index 0b0ae3dea..000000000 --- a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/Device/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Device (owl-base.Owl_computation_optimiser_sig.Sig.Operator.Symbol.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/index.html b/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/index.html deleted file mode 100644 index 3ec181f2e..000000000 --- a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/Type/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Type (owl-base.Owl_computation_optimiser_sig.Sig.Operator.Symbol.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

The block type keeps a reference to a block of memory and to the nodes sharing that block.
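
For illustration only, a small hypothetical helper (not part of the library) that reads these fields back:

let block_summary (b : block) =
  Printf.sprintf "block %d: size %d, %d node(s), active node %s"
    b.block_id b.size
    (List.length b.nodes)
    (match b.active with Some _ -> "present" | None -> "none")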

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option - * int option - * bool option - * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
\ No newline at end of file diff --git a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/index.html b/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/index.html deleted file mode 100644 index 6a9c4df6f..000000000 --- a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/Shape/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Shape (owl-base.Owl_computation_optimiser_sig.Sig.Operator.Symbol.Shape)

Module Symbol.Shape

Core functions
val infer_shape : - Type.op -> - Type.attr Owl_graph.node array -> - int array option array

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/index.html b/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/index.html deleted file mode 100644 index 43fdb8703..000000000 --- a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/Symbol/index.html +++ /dev/null @@ -1,28 +0,0 @@ - -Symbol (owl-base.Owl_computation_optimiser_sig.Sig.Operator.Symbol)

Module Operator.Symbol

Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : - ?name:string -> - ?value:Shape.Type.Device.value array -> - ?shape:int array option array -> - ?freeze:bool -> - ?reuse:bool -> - ?state:Shape.Type.state -> - Shape.Type.op -> - Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : - ?shape:int array option array -> - Shape.Type.op -> - Shape.Type.attr Owl_graph.node array -> - Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.

val make_value_block : - Shape.Type.Device.value -> - Shape.Type.attr Owl_graph.node -> - unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.

val add_node_to_block : - Shape.Type.attr Owl_graph.node -> - Shape.Type.block -> - unit

Links a node to a reusable block and initialises the node's memory within the memory of the block.

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Return the node that is currently using the memory of the block.

val set_active_node : - Shape.Type.block -> - Shape.Type.attr Owl_graph.node -> - unit

Update the node that is currently using the block of memory.

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.

val set_value : - Shape.Type.attr Owl_graph.node -> - Shape.Type.Device.value array -> - unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : - Shape.Type.attr Owl_graph.node -> - Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/index.html b/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/index.html deleted file mode 100644 index afc278f61..000000000 --- a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/Operator/index.html +++ /dev/null @@ -1,420 +0,0 @@ - -Operator (owl-base.Owl_computation_optimiser_sig.Sig.Operator)

Module Sig.Operator

Vectorised functions
val empty : int array -> Symbol.Shape.Type.arr

TODO

val zeros : int array -> Symbol.Shape.Type.arr

TODO

val ones : int array -> Symbol.Shape.Type.arr

TODO

val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val sequential : - ?a:Symbol.Shape.Type.elt -> - ?step:Symbol.Shape.Type.elt -> - int array -> - Symbol.Shape.Type.arr

TODO

val uniform : - ?a:Symbol.Shape.Type.elt -> - ?b:Symbol.Shape.Type.elt -> - int array -> - Symbol.Shape.Type.arr

TODO

val gaussian : - ?mu:Symbol.Shape.Type.elt -> - ?sigma:Symbol.Shape.Type.elt -> - int array -> - Symbol.Shape.Type.arr

TODO

val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val init_nd : - int array -> - (int array -> Symbol.Shape.Type.elt) -> - Symbol.Shape.Type.arr

TODO

val shape : Symbol.Shape.Type.arr -> int array

TODO

val numel : Symbol.Shape.Type.arr -> int

TODO

TODO

val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit

TODO

val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val set_slice : - int list list -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - unit

TODO

TODO

val set_fancy : - Owl_types.index list -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - unit

TODO

val copy_ : out:'a -> 'b -> 'c

TODO

val reset : Symbol.Shape.Type.arr -> unit

TODO

val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val pad : - ?v:Symbol.Shape.Type.elt -> - int list list -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr

TODO

val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val concatenate : - ?axis:int -> - Symbol.Shape.Type.arr array -> - Symbol.Shape.Type.arr

TODO

val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

TODO

val split : ?axis:int -> 'a -> 'b -> 'c

TODO

val draw : - ?axis:int -> - Symbol.Shape.Type.arr -> - int -> - Symbol.Shape.Type.arr * 'a array

TODO

TODO

delay f x returns f x. It makes it possible to use a function that is not tracked by the computation graph and to delay its evaluation. The output must have the same shape as the input.

delay_array out_shape f x works in the same way as delay but is applied to an array of ndarrays. The shape of the output must be supplied as an argument.
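
A hedged sketch of delay, assuming Op is a concrete instantiation of this Operator signature and that delay has the type suggested by its description, (Device.A.arr -> Device.A.arr) -> arr -> arr. add_scalar and float_to_elt are taken from the A module documented elsewhere on this page.

  (* [Op] is an assumed concrete instantiation of this Operator signature. *)
  module A = Op.Symbol.Shape.Type.Device.A

  (* [plus_one] works directly on device ndarrays and preserves the shape,
     as [delay] requires. *)
  let plus_one a = A.add_scalar a (A.float_to_elt 1.)

  let apply_untracked x = Op.delay plus_one x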

val lazy_print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

lazy_print x prints the output of x when it is evaluated. It is implemented as an identity node. For information about the optional parameters, refer to the print function of the Ndarray module.
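
A small sketch of lazy_print as a debugging checkpoint, again assuming Op is a concrete instantiation of this signature; sum and lazy_print are the functions documented on this page.

  (* [Op] is an assumed concrete instantiation of this Operator signature. *)
  (* [lazy_print] is an identity node, so [traced_sum x] computes the same
     value as the plain sum and additionally prints it when the graph is
     evaluated. *)
  let traced_sum x =
    x |> Op.sum ~keep_dims:true |> Op.lazy_print ~max_row:10 ~header:true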

val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit

TODO

val min : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val sum : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val sum_reduce : - ?axis:int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val log_sum_exp : - ?axis:int -> - ?keep_dims:bool -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

TODO

val elt_less_equal_scalar : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.elt -> - Symbol.Shape.Type.arr

TODO

val elt_greater_equal_scalar : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.elt -> - Symbol.Shape.Type.arr

TODO

val conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val max_pool3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool1d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool2d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val avg_pool3d : - ?padding:Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr

TODO

val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_input : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_kernel : - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max_pool1d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max_pool2d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val max_pool3d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val avg_pool1d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val avg_pool2d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val avg_pool3d_backward : - Owl_types.padding -> - Symbol.Shape.Type.arr -> - int array -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val upsampling2d_backward : - Symbol.Shape.Type.arr -> - int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val row_num : Symbol.Shape.Type.arr -> int

TODO

val col_num : Symbol.Shape.Type.arr -> int

TODO

val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

TODO

val transpose : - ?axis:int array -> - Symbol.Shape.Type.arr -> - Symbol.Shape.Type.arr

TODO

val to_rows : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val to_cols : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val of_array : - Symbol.Shape.Type.elt array -> - int array -> - Symbol.Shape.Type.arr

TODO

val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr

TODO

val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array

TODO

Scalar functions
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/index.html b/owl-base/Owl_computation_optimiser_sig/module-type-Sig/index.html deleted file mode 100644 index 1610b3f1a..000000000 --- a/owl-base/Owl_computation_optimiser_sig/module-type-Sig/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Sig (owl-base.Owl_computation_optimiser_sig.Sig)

Module type Owl_computation_optimiser_sig.Sig

Core functions
val estimate_complexity : 'a Owl_graph.node array -> int * int

TODO

val optimise_nodes : - Operator.Symbol.Shape.Type.attr Owl_graph.node array -> - unit

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_shape/.dummy b/owl-base/Owl_computation_shape/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_computation_shape/Make/argument-1-Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_shape/Make/argument-1-Type/Device/A/Linalg/index.html deleted file mode 100644 index 219e54ec8..000000000 --- a/owl-base/Owl_computation_shape/Make/argument-1-Type/Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_computation_shape.Make.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : - ?solver:[ `default | `bilinear | `direct ] -> - arr -> - arr -> - arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_shape/Make/argument-1-Type/Device/A/Mat/index.html b/owl-base/Owl_computation_shape/Make/argument-1-Type/Device/A/Mat/index.html deleted file mode 100644 index 5f3f40ada..000000000 --- a/owl-base/Owl_computation_shape/Make/argument-1-Type/Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_shape.Make.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_shape/Make/argument-1-Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_shape/Make/argument-1-Type/Device/A/Scalar/index.html deleted file mode 100644 index 0c8a4b243..000000000 --- a/owl-base/Owl_computation_shape/Make/argument-1-Type/Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_computation_shape.Make.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_computation_shape/Make/argument-1-Type/Device/A/index.html b/owl-base/Owl_computation_shape/Make/argument-1-Type/Device/A/index.html deleted file mode 100644 index f12057101..000000000 --- a/owl-base/Owl_computation_shape/Make/argument-1-Type/Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_computation_shape.Make.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(elt -> string) -> - arr -> - unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val dilated_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val transpose_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val max_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv1d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : - ?transa:bool -> - ?transb:bool -> - ?alpha:elt -> - ?beta:elt -> - c:arr -> - arr -> - arr -> - unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val conv2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val conv3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val dilated_conv1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - unit
val dilated_conv2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - unit
val dilated_conv3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - unit
val transpose_conv1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val transpose_conv2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val transpose_conv3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - unit
val max_pool1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val max_pool2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val max_pool3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val avg_pool1d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val avg_pool2d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val avg_pool3d_ : - out:arr -> - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv1d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv2d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv2d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv3d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val dilated_conv3d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - int array -> - arr -> - unit
val transpose_conv1d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv1d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv2d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv2d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv3d_backward_input_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val transpose_conv3d_backward_kernel_ : - out:arr -> - arr -> - arr -> - int array -> - arr -> - unit
val max_pool1d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val max_pool2d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val max_pool3d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val avg_pool1d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val avg_pool2d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val avg_pool3d_backward_ : - out:arr -> - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file diff --git a/owl-base/Owl_computation_shape/Make/argument-1-Type/Device/index.html b/owl-base/Owl_computation_shape/Make/argument-1-Type/Device/index.html deleted file mode 100644 index b0698bb1f..000000000 --- a/owl-base/Owl_computation_shape/Make/argument-1-Type/Device/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Device (owl-base.Owl_computation_shape.Make.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_shape/Make/argument-1-Type/index.html b/owl-base/Owl_computation_shape/Make/argument-1-Type/index.html deleted file mode 100644 index 79f78cb37..000000000 --- a/owl-base/Owl_computation_shape/Make/argument-1-Type/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Type (owl-base.Owl_computation_shape.Make.Type)

Parameter Make.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (* TODO *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

The block type keeps a reference to a block of memory and to the nodes sharing that block.
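
A minimal sketch of reading the record defined above; it uses only the field names shown in the definition.

  (* Inspect the bookkeeping fields of a [block] value. *)
  let describe_block (b : block) =
    Printf.printf "block %d: size %d, %d node(s)%s\n"
      b.block_id b.size
      (List.length b.nodes)
      (match b.active with Some _ -> ", in use" | None -> ", idle")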

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * int -> elt
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of elt -> elt
  31. | Fold of int * elt -> elt -> elt
  32. | Scan of int * elt -> elt -> elt
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of Device.A.arr -> Device.A.arr
  36. | DelayArray of int array * Device.A.arr array -> Device.A.arr
  37. | LazyPrint of int option - * int option - * bool option - * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (* TODO *)
\ No newline at end of file diff --git a/owl-base/Owl_computation_shape/Make/index.html b/owl-base/Owl_computation_shape/Make/index.html deleted file mode 100644 index 423ec380c..000000000 --- a/owl-base/Owl_computation_shape/Make/index.html +++ /dev/null @@ -1,106 +0,0 @@ - -Make (owl-base.Owl_computation_shape.Make)

Module Owl_computation_shape.Make

Parameters

Signature

module Type = Type
val _infer_shape_00 : 'a -> 'b array option array
val _infer_shape_01 : 'a array option array array -> 'a array option array
val _infer_shape_02 : 'a array option array array -> 'a array option array
val _infer_shape_03 : int array option array array -> int array option array
val _infer_shape_04 : - int array option array array -> - int -> - int array option array
val _infer_shape_05 : - int array option array array -> - int array -> - int array option array
val _infer_shape_06 : - int array option array array -> - int array -> - int array option array
val _infer_shape_07 : - int array option array array -> - int -> - int array option array
val _infer_shape_08 : - int array option array array -> - int -> - int array -> - int array option array
val _infer_shape_09 : - 'a array option array array -> - int -> - 'b -> - 'a array option array
val _infer_shape_10 : - int array option array array -> - int array -> - int array option array
val _infer_shape_11 : - int array option array array -> - Owl_types.padding -> - int array -> - int array option array
val _infer_shape_12 : - int array option array array -> - Owl_types.padding -> - int array -> - int array option array
val _infer_shape_13 : - int array option array array -> - Owl_types.padding -> - int array -> - int array option array
val _infer_shape_14 : - int array option array array -> - Owl_types.padding -> - int array -> - int array option array
val _infer_shape_15 : - int array option array array -> - Owl_types.padding -> - int array -> - int array -> - int array option array
val _infer_shape_16 : - int array option array array -> - Owl_types.padding -> - int array -> - int array -> - int array option array
val _infer_shape_17 : - int array option array array -> - Owl_types.padding -> - int array -> - int array -> - int array option array
val _infer_shape_18 : - 'a array option array array -> - int array -> - 'a array option array
val _infer_shape_19 : 'a array option array array -> 'a array option array
val _infer_shape_20 : - int array option array array -> - int list list -> - int array option array
val _infer_shape_21 : - int array option array array -> - Owl_types.padding -> - int array -> - int array -> - int array option array
val _infer_shape_22 : - 'a array option array array -> - 'b -> - 'a array option array
val _infer_shape_23 : int array option array array -> int array option array
val _infer_shape_24 : - int array option array array -> - Owl_types.padding -> - int array -> - int array option array
val _infer_shape_25 : - int array option array array -> - Owl_types.padding -> - int array -> - int array option array
val _infer_shape_26 : - int array option array array -> - Owl_types.padding -> - int array -> - int array -> - int array option array
val _infer_shape_27 : - int array option array array -> - Owl_types.padding -> - int array -> - int array -> - int array option array
val _infer_shape_28 : - int array option array array -> - Owl_types.padding -> - int array -> - int array -> - int array option array
val _infer_shape_29 : - int array option array array -> - int array -> - int array option array
val _infer_shape_30 : - int array option array array -> - int list list -> - int array option array
val _infer_shape_31 : - bool -> - int array option array array -> - int -> - int array option array
val infer_shape : - Type.op -> - Type.attr Owl_graph.node array -> - int array option array
\ No newline at end of file diff --git a/owl-base/Owl_computation_shape_sig/.dummy b/owl-base/Owl_computation_shape_sig/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/A/Linalg/index.html deleted file mode 100644 index 6e69356e3..000000000 --- a/owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_computation_shape_sig.Sig.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : - ?solver:[ `default | `bilinear | `direct ] -> - arr -> - arr -> - arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/A/Mat/index.html deleted file mode 100644 index d25a6a07e..000000000 --- a/owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_shape_sig.Sig.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/A/Scalar/index.html deleted file mode 100644 index 4f535785e..000000000 --- a/owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_computation_shape_sig.Sig.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/A/index.html b/owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/A/index.html deleted file mode 100644 index 0fa55a3ec..000000000 --- a/owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_computation_shape_sig.Sig.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(elt -> string) -> - arr -> - unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val dilated_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val transpose_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val max_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv1d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
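All of the trailing-underscore functions above (add_, sin_, conv2d_, the backward_ variants, and so on) are the in-place counterparts of the allocating versions: the result is written into the out array rather than into a freshly allocated one, and when out is optional and omitted the first array argument is typically overwritten. The following is a minimal sketch of that convention using plain float arrays as a hypothetical stand-in for arr; it is not Owl code.

(* Hypothetical stand-in for the ?out convention; plain float arrays, not Owl's arr. *)
let add a b = Array.map2 ( +. ) a b                       (* allocating version *)

let add_ ?out a b =
  (* assumption: when ?out is omitted, the first argument is overwritten in place *)
  let dst = match out with Some o -> o | None -> a in
  Array.iteri (fun i _ -> dst.(i) <- a.(i) +. b.(i)) dst

let () =
  let x = [| 1.; 2.; 3. |] and y = [| 10.; 20.; 30. |] in
  ignore (add x y);                                       (* returns a new array *)
  add_ ~out:x x y;                                        (* writes the sum into x *)
  Array.iter (Printf.printf "%g ") x;
  print_newline ()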
\ No newline at end of file
diff --git a/owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/index.html b/owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/index.html
deleted file mode 100644
index 1f35a3da3..000000000
--- a/owl-base/Owl_computation_shape_sig/module-type-Sig/Type/Device/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Device (owl-base.Owl_computation_shape_sig.Sig.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO
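The conversion functions above shuttle data between the host ndarray types A.arr / A.elt and the device's opaque value type, with is_arr / is_elt distinguishing which kind of data a value wraps. A simplified, hypothetical model of that wrapping (stand-in types only, not the actual implementation) could look like this:

(* Hypothetical stand-in: a device value wraps either an ndarray or a scalar. *)
type value = ArrValue of float array | EltValue of float

let arr_to_value a = ArrValue a
let elt_to_value e = EltValue e
let value_to_arr = function ArrValue a -> a | EltValue _ -> invalid_arg "not an array"
let value_to_elt = function EltValue e -> e | ArrValue _ -> invalid_arg "not a scalar"
let value_to_float v = value_to_elt v
let is_arr = function ArrValue _ -> true | EltValue _ -> false
let is_elt = function EltValue _ -> true | ArrValue _ -> false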

\ No newline at end of file
diff --git a/owl-base/Owl_computation_shape_sig/module-type-Sig/Type/index.html b/owl-base/Owl_computation_shape_sig/module-type-Sig/Type/index.html
deleted file mode 100644
index da3288e63..000000000
--- a/owl-base/Owl_computation_shape_sig/module-type-Sig/Type/index.html
+++ /dev/null
@@ -1,5 +0,0 @@
Type (owl-base.Owl_computation_shape_sig.Sig.Type)

Module Sig.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

The block type keeps a reference to a block of memory and to the nodes sharing that block; a simplified sketch of this sharing appears after the full type definition below.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
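As noted above, the block record is the unit of memory reuse: several graph nodes can point at the same block and therefore at the same underlying buffer. The following is a simplified sketch of that sharing relationship with hypothetical stand-in types; in the real signature the nodes are attr graph nodes and the memory is a Device.value.

(* Hypothetical, simplified model of memory-block sharing between nodes. *)
type node = { name : string }

type block = {
  size : int;                  (* capacity of the underlying buffer *)
  block_id : int;
  mutable nodes : node list;   (* nodes whose value lives in this block *)
}

let make_empty_block ?(block_id = 0) size = { size; block_id; nodes = [] }
let add_node_to_block n b = b.nodes <- n :: b.nodes

let () =
  let b = make_empty_block ~block_id:1 100 in
  add_node_to_block { name = "x" } b;
  add_node_to_block { name = "relu x" } b;   (* both nodes reuse the same memory *)
  Printf.printf "block %d (capacity %d) is shared by: %s\n"
    b.block_id b.size
    (String.concat ", " (List.map (fun n -> n.name) b.nodes))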
\ No newline at end of file
diff --git a/owl-base/Owl_computation_shape_sig/module-type-Sig/index.html b/owl-base/Owl_computation_shape_sig/module-type-Sig/index.html
deleted file mode 100644
index 1db92ef00..000000000
--- a/owl-base/Owl_computation_shape_sig/module-type-Sig/index.html
+++ /dev/null
@@ -1,5 +0,0 @@
Sig (owl-base.Owl_computation_shape_sig.Sig)

Module type Owl_computation_shape_sig.Sig

Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO
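infer_shape takes an operator together with its parent nodes and returns the shape of each output, with None standing for a shape that cannot be determined yet. Below is a hypothetical sketch of the idea for a few operators, using raw int array shapes instead of graph nodes; it is not Owl's implementation.

(* Hypothetical sketch of shape inference; not Owl's implementation. *)
type op = Add | SumReduce of int array | Transpose of int array

let infer_shape op (parents : int array option array) =
  match op, parents with
  | Add, [| Some s; Some _ |] -> [| Some (Array.copy s) |]         (* element-wise op keeps the shape *)
  | SumReduce axes, [| Some s |] ->
    let s' = Array.copy s in
    Array.iter (fun a -> s'.(a) <- 1) axes;                        (* reduced axes collapse to 1 *)
    [| Some s' |]
  | Transpose axes, [| Some s |] -> [| Some (Array.map (fun a -> s.(a)) axes) |]
  | _ -> [| None |]                                                (* unknown until the inputs are known *)

let () =
  match (infer_shape (SumReduce [| 0 |]) [| Some [| 3; 4 |] |]).(0) with
  | Some s -> Array.iter (Printf.printf "%d ") s; print_newline () (* prints: 1 4 *)
  | None -> print_endline "unknown"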

\ No newline at end of file
diff --git a/owl-base/Owl_computation_symbol/.dummy b/owl-base/Owl_computation_symbol/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/A/Linalg/index.html
deleted file mode 100644
index aee205e81..000000000
--- a/owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/A/Linalg/index.html
+++ /dev/null
@@ -1,6 +0,0 @@
Linalg (owl-base.Owl_computation_symbol.Make.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
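linsolve above solves a linear system A X = B (optionally with a transposed or triangular A), while sylvester, lyapunov, care and dare solve the corresponding matrix equations. As a purely illustrative, hypothetical example of the kind of problem linsolve handles, here is a 2x2 solve by Cramer's rule on plain floats; Owl's linsolve works on arbitrary arr matrices.

(* Hypothetical illustration only; not the Owl implementation. *)
let solve_2x2 (a11, a12, a21, a22) (b1, b2) =
  let det = (a11 *. a22) -. (a12 *. a21) in
  if det = 0. then invalid_arg "singular matrix";
  ((b1 *. a22 -. b2 *. a12) /. det, (b2 *. a11 -. b1 *. a21) /. det)

let () =
  (* solve [2 1; 1 3] x = [5; 10]; the answer is x = (1, 3) *)
  let x1, x2 = solve_2x2 (2., 1., 1., 3.) (5., 10.) in
  Printf.printf "x = (%g, %g)\n" x1 x2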
\ No newline at end of file
diff --git a/owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/A/Mat/index.html
deleted file mode 100644
index 65d968ca3..000000000
--- a/owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/A/Mat/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Mat (owl-base.Owl_computation_symbol.Make.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file
diff --git a/owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/A/Scalar/index.html
deleted file mode 100644
index 7538461ca..000000000
--- a/owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/A/Scalar/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Scalar (owl-base.Owl_computation_symbol.Make.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file
diff --git a/owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/A/index.html b/owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/A/index.html
deleted file mode 100644
index 6aeafc6ec..000000000
--- a/owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/A/index.html
+++ /dev/null
@@ -1,379 +0,0 @@
A (owl-base.Owl_computation_symbol.Make.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file
diff --git a/owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/index.html b/owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/index.html
deleted file mode 100644
index 91a938a2e..000000000
--- a/owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/Device/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Device (owl-base.Owl_computation_symbol.Make.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

\ No newline at end of file
diff --git a/owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/index.html b/owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/index.html
deleted file mode 100644
index 2d3e3b5d9..000000000
--- a/owl-base/Owl_computation_symbol/Make/argument-1-Shape/Type/index.html
+++ /dev/null
@@ -1,5 +0,0 @@
Type (owl-base.Owl_computation_symbol.Make.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

The block type keeps a reference to a block of memory and to the nodes sharing that block.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
\ No newline at end of file
diff --git a/owl-base/Owl_computation_symbol/Make/argument-1-Shape/index.html b/owl-base/Owl_computation_symbol/Make/argument-1-Shape/index.html
deleted file mode 100644
index fb5eb0a86..000000000
--- a/owl-base/Owl_computation_symbol/Make/argument-1-Shape/index.html
+++ /dev/null
@@ -1,5 +0,0 @@
Shape (owl-base.Owl_computation_symbol.Make.Shape)

Parameter Make.Shape

Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO

\ No newline at end of file
diff --git a/owl-base/Owl_computation_symbol/Make/index.html b/owl-base/Owl_computation_symbol/Make/index.html
deleted file mode 100644
index a79f870c2..000000000
--- a/owl-base/Owl_computation_symbol/Make/index.html
+++ /dev/null
@@ -1,19 +0,0 @@
Make (owl-base.Owl_computation_symbol.Make)

Module Owl_computation_symbol.Make

Parameters

Signature

module Shape = Shape
val op_to_str : Shape.Type.op -> string
val is_random_variable : Shape.Type.op -> bool
val refnum : 'a Owl_graph.node -> int
val node_shape : Shape.Type.attr Owl_graph.node -> int array
val node_numel : Shape.Type.attr Owl_graph.node -> int
val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool
val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit
val shape_to_str : int array option array -> string
val node_to_str : Shape.Type.attr Owl_graph.node -> string
val node_to_arr : Shape.Type.t -> Shape.Type.arr
val arr_to_node : Shape.Type.arr -> Shape.Type.t
val node_to_elt : Shape.Type.t -> Shape.Type.elt
val elt_to_node : Shape.Type.elt -> Shape.Type.t
val new_block_id : unit -> int
val make_empty_block : ?block_id:int -> int -> Shape.Type.block
val make_value_block : Shape.Type.Device.value -> Shape.Type.t -> unit
val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node
val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node
val var_arr : ?shape:int array -> string -> Shape.Type.arr
val var_elt : string -> Shape.Type.elt
val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr
val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt
val get_nodes_using_block : Shape.Type.block -> Shape.Type.t list
val _get_value_block : Shape.Type.block -> Shape.Type.Device.value
val get_block_opt : Shape.Type.attr Owl_graph.node -> Shape.Type.block array option
val _set_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block array -> unit
val add_node_to_block : Shape.Type.t -> Shape.Type.block -> unit
val get_active_node : Shape.Type.block -> Shape.Type.t option
val set_active_node : Shape.Type.block -> Shape.Type.t -> unit
val get_block_id : Shape.Type.attr Owl_graph.node -> int
val set_value : Shape.Type.t -> Shape.Type.Device.value array -> unit
val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit
val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit
val get_reuse : Shape.Type.attr Owl_graph.node -> bool
val is_shared : Shape.Type.attr Owl_graph.node -> bool
val get_shared_nodes : Shape.Type.t -> Shape.Type.t array
val is_var : Shape.Type.attr Owl_graph.node -> bool
val is_const : Shape.Type.attr Owl_graph.node -> bool
val is_node_arr : Shape.Type.attr Owl_graph.node -> bool
val is_node_elt : Shape.Type.attr Owl_graph.node -> bool
val is_assigned : Shape.Type.attr Owl_graph.node -> bool
val check_assigned : Shape.Type.attr Owl_graph.node -> unit
val is_valid : Shape.Type.attr Owl_graph.node -> bool
val validate : Shape.Type.attr Owl_graph.node -> unit
val invalidate : Shape.Type.attr Owl_graph.node -> unit
val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit
val is_freeze : Shape.Type.attr Owl_graph.node -> bool
val freeze : Shape.Type.attr Owl_graph.node -> unit
val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit
val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit
val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit
val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit
val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit
val float_to_elt : float -> Shape.Type.elt
val elt_to_float : Shape.Type.elt -> float
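The arr and elt values handled by this module are thin wrappers around graph nodes, so node_to_arr / arr_to_node and node_to_elt / elt_to_node are inverse pairs. The following is a minimal sketch of that wrapping with a hypothetical stand-in node type; in the real module, t is a Shape.Type.attr Owl_graph.node.

(* Hypothetical stand-in types; only the wrapping pattern is illustrated. *)
type t = { id : int; name : string }
type arr = Arr of t
type elt = Elt of t

let node_to_arr node = Arr node
let arr_to_node (Arr node) = node
let node_to_elt node = Elt node
let elt_to_node (Elt node) = node

let () =
  let x = { id = 0; name = "x" } in
  assert (arr_to_node (node_to_arr x) == x);   (* round-trips return the same node *)
  assert (elt_to_node (node_to_elt x) == x);
  Printf.printf "wrapped node %s (id %d)\n" x.name x.id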
\ No newline at end of file
diff --git a/owl-base/Owl_computation_symbol_sig/.dummy b/owl-base/Owl_computation_symbol_sig/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/A/Linalg/index.html
deleted file mode 100644
index 9a4db0cd6..000000000
--- a/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/A/Linalg/index.html
+++ /dev/null
@@ -1,6 +0,0 @@
Linalg (owl-base.Owl_computation_symbol_sig.Sig.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file
diff --git a/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/A/Mat/index.html
deleted file mode 100644
index 56d186cfd..000000000
--- a/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/A/Mat/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Mat (owl-base.Owl_computation_symbol_sig.Sig.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file
diff --git a/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/A/Scalar/index.html
deleted file mode 100644
index 0919bbc05..000000000
--- a/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/A/Scalar/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Scalar (owl-base.Owl_computation_symbol_sig.Sig.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file
diff --git a/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/A/index.html b/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/A/index.html
deleted file mode 100644
index 524f37be2..000000000
--- a/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/A/index.html
+++ /dev/null
@@ -1,379 +0,0 @@
A (owl-base.Owl_computation_symbol_sig.Sig.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
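
The A module above is a plain ndarray API. The following is a small, hypothetical OCaml sketch (not part of the deleted pages) showing how a handful of the documented functions compose; the functor name Ndarray_demo and its inline parameter signature are illustrative and only restate entries listed above.

module Ndarray_demo (N : sig
  type arr
  type elt
  val ones : int array -> arr
  val mul_scalar : arr -> elt -> arr
  val sum' : arr -> elt
  val float_to_elt : float -> elt
  val elt_to_float : elt -> float
end) = struct
  (* Sum of a 2x3 array of ones scaled by 2.0, i.e. 12.0. *)
  let twelve () =
    let a = N.ones [| 2; 3 |] in
    let b = N.mul_scalar a (N.float_to_elt 2.0) in
    N.elt_to_float (N.sum' b)
end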
\ No newline at end of file diff --git a/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/index.html b/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/index.html deleted file mode 100644 index 8f72c8044..000000000 --- a/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/Device/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Device (owl-base.Owl_computation_symbol_sig.Sig.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO
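
As a rough illustration of how the conversion functions above are meant to be used, here is a hypothetical sketch; the functor Device_demo and its inline parameter signature are assumptions that only restate the entries documented on this page.

module Device_demo (D : sig
  type value
  module A : sig type arr end
  val arr_to_value : A.arr -> value
  val value_to_arr : value -> A.arr
  val is_arr : value -> bool
end) = struct
  (* Round-trip an ndarray through the device's value representation. *)
  let roundtrip (x : D.A.arr) : D.A.arr =
    let v = D.arr_to_value x in
    assert (D.is_arr v);
    D.value_to_arr v
end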

\ No newline at end of file diff --git a/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/index.html b/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/index.html deleted file mode 100644 index f9c3c283f..000000000 --- a/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/Type/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Type (owl-base.Owl_computation_symbol_sig.Sig.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

The block type keeps a reference to a block of memory and to the nodes sharing that block.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * int -> elt
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of elt -> elt
  31. | Fold of int * elt -> elt -> elt
  32. | Scan of int * elt -> elt -> elt
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of Device.A.arr -> Device.A.arr
  36. | DelayArray of int array * Device.A.arr array -> Device.A.arr
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
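
A hypothetical sketch of dispatching on a few of the op constructors listed above; it assumes a module S implementing Owl_computation_symbol_sig.Sig (documented further down in this patch) and uses only constructors shown in the variant.

module Op_demo (S : Owl_computation_symbol_sig.Sig) = struct
  open S.Shape.Type

  (* Classify a few constructors by what they describe. *)
  let describe_op = function
    | Noop | Var | Const -> "leaf"
    | Zeros s | Ones s | Empty s ->
      Printf.sprintf "allocation of rank %d" (Array.length s)
    | Add | Sub | Mul | Div -> "element-wise binary op"
    | _ -> "other"
end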
\ No newline at end of file diff --git a/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/index.html b/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/index.html deleted file mode 100644 index 3c9735950..000000000 --- a/owl-base/Owl_computation_symbol_sig/module-type-Sig/Shape/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Shape (owl-base.Owl_computation_symbol_sig.Sig.Shape)

Module Sig.Shape

Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_symbol_sig/module-type-Sig/index.html b/owl-base/Owl_computation_symbol_sig/module-type-Sig/index.html deleted file mode 100644 index 99e419267..000000000 --- a/owl-base/Owl_computation_symbol_sig/module-type-Sig/index.html +++ /dev/null @@ -1,28 +0,0 @@ - -Sig (owl-base.Owl_computation_symbol_sig.Sig)

Module type Owl_computation_symbol_sig.Sig

Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO
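
To show how the constructors above fit together, here is a minimal, hypothetical sketch (the module name Node_demo is not from the source); it assumes a module S implementing Owl_computation_symbol_sig.Sig and uses only functions documented in this signature.

module Node_demo (S : Owl_computation_symbol_sig.Sig) = struct
  open S

  (* Build a 3x3 Zeros node, run shape inference over the one-node
     graph, then render the node as a string. *)
  let demo () =
    let z =
      make_node ~name:"z" ~shape:[| Some [| 3; 3 |] |]
        (Shape.Type.Zeros [| 3; 3 |])
    in
    infer_shape_graph [| z |];
    print_endline (node_to_str z)
end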

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.

val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.

val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit

Links a node to a reusable block and initialises the node's memory inside the block's memory.

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Return the node that is currently using the memory of the block.

val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit

Update the node that is currently using the block of memory.

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.
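
The block-management entries above work together; the following hypothetical sketch (Block_demo is an illustrative name) wires a few of them up, assuming a module S implementing Owl_computation_symbol_sig.Sig and two already-constructed nodes. The block size 1024 is arbitrary.

module Block_demo (S : Owl_computation_symbol_sig.Sig) = struct
  open S

  (* Let two nodes share one freshly created block, then report the
     block id assigned to the first node and refresh the active owner. *)
  let share n1 n2 =
    let blk = make_empty_block ~block_id:(new_block_id ()) 1024 in
    add_node_to_block n1 blk;
    add_node_to_block n2 blk;
    Printf.printf "n1 uses block %d\n" (get_block_id n1);
    match get_active_node blk with
    | Some owner -> set_active_node blk owner
    | None -> ()
end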

val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO
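
A short, hypothetical sketch of binding concrete data to symbolic handles with the functions above (Assign_demo is an illustrative name); it assumes a module S implementing Owl_computation_symbol_sig.Sig and uses only documented entries.

module Assign_demo (S : Owl_computation_symbol_sig.Sig) = struct
  open S

  (* Wrap a concrete ndarray as the value of a symbolic variable, and
     lift a float into a symbolic scalar. *)
  let bind (x_data : Shape.Type.Device.A.arr) =
    let x = var_arr ~shape:(Shape.Type.Device.A.shape x_data) "x" in
    assign_arr x x_data;
    (x, float_to_elt 1.0)
end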

\ No newline at end of file diff --git a/owl-base/Owl_computation_type/.dummy b/owl-base/Owl_computation_type/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_computation_type/Make/argument-1-Device/A/Linalg/index.html b/owl-base/Owl_computation_type/Make/argument-1-Device/A/Linalg/index.html deleted file mode 100644 index 2272fd2c9..000000000 --- a/owl-base/Owl_computation_type/Make/argument-1-Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_computation_type.Make.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
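
As a quick, hypothetical illustration of the listing above (the functor Linalg_demo and its inline parameter signature merely restate documented entries), the sketch below solves a linear system and reports the log-determinant of its matrix.

module Linalg_demo (L : sig
  type arr
  type elt
  val logdet : arr -> elt
  val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
end) = struct
  (* Solve a x = b and also return log |det a|. *)
  let solve_and_logdet a b = (L.linsolve a b, L.logdet a)
end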
\ No newline at end of file diff --git a/owl-base/Owl_computation_type/Make/argument-1-Device/A/Mat/index.html b/owl-base/Owl_computation_type/Make/argument-1-Device/A/Mat/index.html deleted file mode 100644 index 0001f330e..000000000 --- a/owl-base/Owl_computation_type/Make/argument-1-Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_type.Make.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_type/Make/argument-1-Device/A/Scalar/index.html b/owl-base/Owl_computation_type/Make/argument-1-Device/A/Scalar/index.html deleted file mode 100644 index 5bf868c2d..000000000 --- a/owl-base/Owl_computation_type/Make/argument-1-Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_computation_type.Make.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_computation_type/Make/argument-1-Device/A/index.html b/owl-base/Owl_computation_type/Make/argument-1-Device/A/index.html deleted file mode 100644 index d05da981f..000000000 --- a/owl-base/Owl_computation_type/Make/argument-1-Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_computation_type.Make.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file diff --git a/owl-base/Owl_computation_type/Make/argument-1-Device/index.html b/owl-base/Owl_computation_type/Make/argument-1-Device/index.html deleted file mode 100644 index 8924af543..000000000 --- a/owl-base/Owl_computation_type/Make/argument-1-Device/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Device (owl-base.Owl_computation_type.Make.Device)

Parameter Make.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_type/Make/index.html b/owl-base/Owl_computation_type/Make/index.html deleted file mode 100644 index 90901957c..000000000 --- a/owl-base/Owl_computation_type/Make/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Make (owl-base.Owl_computation_type.Make)

Module Owl_computation_type.Make

Parameters

Signature

module Device = Device
type state =
  1. | Valid
  2. | Invalid
and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}
and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}
and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * int -> elt
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types.index list
  18. | SetFancy of Owl_types.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of elt -> elt
  31. | Fold of int * elt -> elt -> elt
  32. | Scan of int * elt -> elt -> elt
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of Device.A.arr -> Device.A.arr
  36. | DelayArray of int array * Device.A.arr array -> Device.A.arr
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types.padding * int array
  114. | Conv2d of Owl_types.padding * int array
  115. | Conv3d of Owl_types.padding * int array
  116. | TransposeConv1d of Owl_types.padding * int array
  117. | TransposeConv2d of Owl_types.padding * int array
  118. | TransposeConv3d of Owl_types.padding * int array
  119. | DilatedConv1d of Owl_types.padding * int array * int array
  120. | DilatedConv2d of Owl_types.padding * int array * int array
  121. | DilatedConv3d of Owl_types.padding * int array * int array
  122. | MaxPool1d of Owl_types.padding * int array * int array
  123. | MaxPool2d of Owl_types.padding * int array * int array
  124. | MaxPool3d of Owl_types.padding * int array * int array
  125. | AvgPool1d of Owl_types.padding * int array * int array
  126. | AvgPool2d of Owl_types.padding * int array * int array
  127. | AvgPool3d of Owl_types.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
\ No newline at end of file diff --git a/owl-base/Owl_computation_type_sig/.dummy b/owl-base/Owl_computation_type_sig/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_computation_type_sig/module-type-Sig/Device/A/Linalg/index.html b/owl-base/Owl_computation_type_sig/module-type-Sig/Device/A/Linalg/index.html deleted file mode 100644 index d7ad177b1..000000000 --- a/owl-base/Owl_computation_type_sig/module-type-Sig/Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_computation_type_sig.Sig.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_type_sig/module-type-Sig/Device/A/Mat/index.html b/owl-base/Owl_computation_type_sig/module-type-Sig/Device/A/Mat/index.html deleted file mode 100644 index 4216854cc..000000000 --- a/owl-base/Owl_computation_type_sig/module-type-Sig/Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_computation_type_sig.Sig.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_computation_type_sig/module-type-Sig/Device/A/Scalar/index.html b/owl-base/Owl_computation_type_sig/module-type-Sig/Device/A/Scalar/index.html deleted file mode 100644 index e0275199b..000000000 --- a/owl-base/Owl_computation_type_sig/module-type-Sig/Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_computation_type_sig.Sig.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_computation_type_sig/module-type-Sig/Device/A/index.html b/owl-base/Owl_computation_type_sig/module-type-Sig/Device/A/index.html deleted file mode 100644 index 567821cda..000000000 --- a/owl-base/Owl_computation_type_sig/module-type-Sig/Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_computation_type_sig.Sig.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
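
Since every function above is specified only against the abstract arr and elt types, generic numerical helpers can be written as functors over this signature. The following is a hypothetical sketch (not code from Owl), assuming Owl_types_ndarray_algodiff.Sig re-exports the operations listed above, as the includes at the top of this module suggest:

module Mse (N : Owl_types_ndarray_algodiff.Sig) = struct
  (* mean squared error between two arrays of identical shape *)
  let f y y' =
    let d = N.sub y y' in
    N.elt_to_float (N.sum' (N.mul d d)) /. float_of_int (N.numel d)
end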
\ No newline at end of file diff --git a/owl-base/Owl_computation_type_sig/module-type-Sig/Device/index.html b/owl-base/Owl_computation_type_sig/module-type-Sig/Device/index.html deleted file mode 100644 index b1fe502aa..000000000 --- a/owl-base/Owl_computation_type_sig/module-type-Sig/Device/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Device (owl-base.Owl_computation_type_sig.Sig.Device)

Module Sig.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

\ No newline at end of file diff --git a/owl-base/Owl_computation_type_sig/module-type-Sig/index.html b/owl-base/Owl_computation_type_sig/module-type-Sig/index.html deleted file mode 100644 index 9af718c37..000000000 --- a/owl-base/Owl_computation_type_sig/module-type-Sig/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Sig (owl-base.Owl_computation_type_sig.Sig)

Module type Owl_computation_type_sig.Sig

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

The block type keeps a reference to a block of memory and to the nodes sharing that block.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
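
As a purely illustrative sketch (not code from Owl), passes over the computation graph typically dispatch on this variant with an ordinary pattern match. For instance, a predicate selecting a few of the element-wise unary operators above could be written as follows, assuming the constructors are brought into scope from a module implementing Sig:

let is_unary_elementwise = function
  | Abs | Neg | Floor | Ceil | Round | Sqr | Sqrt | Exp
  | Log | Log2 | Log10 | Sin | Cos | Tan | Sigmoid | Relu -> true
  | _ -> false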
\ No newline at end of file diff --git a/owl-base/Owl_const/.dummy b/owl-base/Owl_const/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_const/CGS/index.html b/owl-base/Owl_const/CGS/index.html deleted file mode 100644 index 2eb90bb32..000000000 --- a/owl-base/Owl_const/CGS/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -CGS (owl-base.Owl_const.CGS)

Module Owl_const.CGS

val speed_of_light : float

speed_of_light = 2.99792458e10

val gravitational_constant : float

gravitational_constant = 6.673e-8

val plancks_constant_h : float

plancks_constant_h = 6.62606896e-27

val plancks_constant_hbar : float

plancks_constant_hbar = 1.05457162825e-27

val astronomical_unit : float

astronomical_unit = 1.49597870691e13

val light_year : float

light_year = 9.46053620707e17

val parsec : float

parsec = 3.08567758135e18

val grav_accel : float

grav_accel = 9.80665e2

val electron_volt : float

electron_volt = 1.602176487e-12

val mass_electron : float

mass_electron = 9.10938188e-28

val mass_muon : float

mass_muon = 1.88353109e-25

val mass_proton : float

mass_proton = 1.67262158e-24

val mass_neutron : float

mass_neutron = 1.67492716e-24

val rydberg : float

rydberg = 2.17987196968e-11

val boltzmann : float

boltzmann = 1.3806504e-16

val molar_gas : float

molar_gas = 8.314472e7

val standard_gas_volume : float

standard_gas_volume = 2.2710981e4

val minute : float

minute = 6e1

val hour : float

hour = 3.6e3

val day : float

day = 8.64e4

val week : float

week = 6.048e5

val inch : float

inch = 2.54e0

val foot : float

foot = 3.048e1

val yard : float

yard = 9.144e1

val mile : float

mile = 1.609344e5

val nautical_mile : float

nautical_mile = 1.852e5

val fathom : float

fathom = 1.8288e2

val mil : float

mil = 2.54e-3

val point : float

point = 3.52777777778e-2

val texpoint : float

texpoint = 3.51459803515e-2

val micron : float

micron = 1e-4

val angstrom : float

angstrom = 1e-8

val hectare : float

hectare = 1e8

val acre : float

acre = 4.04685642241e7

val barn : float

barn = 1e-24

val liter : float

liter = 1e3

val us_gallon : float

us_gallon = 3.78541178402e3

val quart : float

quart = 9.46352946004e2

val pint : float

pint = 4.73176473002e2

val cup : float

cup = 2.36588236501e2

val fluid_ounce : float

fluid_ounce = 2.95735295626e1

val tablespoon : float

tablespoon = 1.47867647813e1

val teaspoon : float

teaspoon = 4.92892159375e0

val canadian_gallon : float

canadian_gallon = 4.54609e3

val uk_gallon : float

uk_gallon = 4.546092e3

val miles_per_hour : float

miles_per_hour = 4.4704e1

val kilometers_per_hour : float

kilometers_per_hour = 2.77777777778e1

val knot : float

knot = 5.14444444444e1

val pound_mass : float

pound_mass = 4.5359237e2

val ounce_mass : float

ounce_mass = 2.8349523125e1

val ton : float

ton = 9.0718474e5

val metric_ton : float

metric_ton = 1e6

val uk_ton : float

uk_ton = 1.0160469088e6

val troy_ounce : float

troy_ounce = 3.1103475e1

val carat : float

carat = 2e-1

val unified_atomic_mass : float

unified_atomic_mass = 1.660538782e-24

val gram_force : float

gram_force = 9.80665e2

val pound_force : float

pound_force = 4.44822161526e5

val kilopound_force : float

kilopound_force = 4.44822161526e8

val poundal : float

poundal = 1.38255e4

val calorie : float

calorie = 4.1868e7

val btu : float

btu = 1.05505585262e10

val therm : float

therm = 1.05506e15

val horsepower : float

horsepower = 7.457e9

val bar : float

bar = 1e6

val std_atmosphere : float

std_atmosphere = 1.01325e6

val torr : float

torr = 1.33322368421e3

val meter_of_mercury : float

meter_of_mercury = 1.33322368421e6

val inch_of_mercury : float

inch_of_mercury = 3.38638815789e4

val inch_of_water : float

inch_of_water = 2.490889e3

val psi : float

psi = 6.89475729317e4

val poise : float

poise = 1e0

val stokes : float

stokes = 1e0

val stilb : float

stilb = 1e0

val lumen : float

lumen = 1e0

val lux : float

lux = 1e-4

val phot : float

phot = 1e0

val footcandle : float

footcandle = 1.076e-3

val lambert : float

lambert = 1e0

val footlambert : float

footlambert = 1.07639104e-3

val curie : float

curie = 3.7e10

val roentgen : float

roentgen = 2.58e-7

val rad : float

rad = 1e2

val solar_mass : float

solar_mass = 1.98892e33

val bohr_radius : float

bohr_radius = 5.291772083e-9

val newton : float

newton = 1e5

val dyne : float

dyne = 1e0

val joule : float

joule = 1e7

val erg : float

erg = 1e0

val stefan_boltzmann_constant : float

stefan_boltzmann_constant = 5.67040047374e-5

val thomson_cross_section : float

thomson_cross_section = 6.65245893699e-25

\ No newline at end of file diff --git a/owl-base/Owl_const/CGSM/index.html b/owl-base/Owl_const/CGSM/index.html deleted file mode 100644 index 7c77d1b36..000000000 --- a/owl-base/Owl_const/CGSM/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -CGSM (owl-base.Owl_const.CGSM)

Module Owl_const.CGSM

val speed_of_light : float

speed_of_light = 2.99792458e10

val gravitational_constant : float

gravitational_constant = 6.673e-8

val plancks_constant_h : float

plancks_constant_h = 6.62606896e-27

val plancks_constant_hbar : float

plancks_constant_hbar = 1.05457162825e-27

val astronomical_unit : float

astronomical_unit = 1.49597870691e13

val light_year : float

light_year = 9.46053620707e17

val parsec : float

parsec = 3.08567758135e18

val grav_accel : float

grav_accel = 9.80665e2

val electron_volt : float

electron_volt = 1.602176487e-12

val mass_electron : float

mass_electron = 9.10938188e-28

val mass_muon : float

mass_muon = 1.88353109e-25

val mass_proton : float

mass_proton = 1.67262158e-24

val mass_neutron : float

mass_neutron = 1.67492716e-24

val rydberg : float

rydberg = 2.17987196968e-11

val boltzmann : float

boltzmann = 1.3806504e-16

val molar_gas : float

molar_gas = 8.314472e7

val standard_gas_volume : float

standard_gas_volume = 2.2710981e4

val minute : float

minute = 6e1

val hour : float

hour = 3.6e3

val day : float

day = 8.64e4

val week : float

week = 6.048e5

val inch : float

inch = 2.54e0

val foot : float

foot = 3.048e1

val yard : float

yard = 9.144e1

val mile : float

mile = 1.609344e5

val nautical_mile : float

nautical_mile = 1.852e5

val fathom : float

fathom = 1.8288e2

val mil : float

mil = 2.54e-3

val point : float

point = 3.52777777778e-2

val texpoint : float

texpoint = 3.51459803515e-2

val micron : float

micron = 1e-4

val angstrom : float

angstrom = 1e-8

val hectare : float

hectare = 1e8

val acre : float

acre = 4.04685642241e7

val barn : float

barn = 1e-24

val liter : float

liter = 1e3

val us_gallon : float

us_gallon = 3.78541178402e3

val quart : float

quart = 9.46352946004e2

val pint : float

pint = 4.73176473002e2

val cup : float

cup = 2.36588236501e2

val fluid_ounce : float

fluid_ounce = 2.95735295626e1

val tablespoon : float

tablespoon = 1.47867647813e1

val teaspoon : float

teaspoon = 4.92892159375e0

val canadian_gallon : float

canadian_gallon = 4.54609e3

val uk_gallon : float

uk_gallon = 4.546092e3

val miles_per_hour : float

miles_per_hour = 4.4704e1

val kilometers_per_hour : float

kilometers_per_hour = 2.77777777778e1

val knot : float

knot = 5.14444444444e1

val pound_mass : float

pound_mass = 4.5359237e2

val ounce_mass : float

ounce_mass = 2.8349523125e1

val ton : float

ton = 9.0718474e5

val metric_ton : float

metric_ton = 1e6

val uk_ton : float

uk_ton = 1.0160469088e6

val troy_ounce : float

troy_ounce = 3.1103475e1

val carat : float

carat = 2e-1

val unified_atomic_mass : float

unified_atomic_mass = 1.660538782e-24

val gram_force : float

gram_force = 9.80665e2

val pound_force : float

pound_force = 4.44822161526e5

val kilopound_force : float

kilopound_force = 4.44822161526e8

val poundal : float

poundal = 1.38255e4

val calorie : float

calorie = 4.1868e7

val btu : float

btu = 1.05505585262e10

val therm : float

therm = 1.05506e15

val horsepower : float

horsepower = 7.457e9

val bar : float

bar = 1e6

val std_atmosphere : float

std_atmosphere = 1.01325e6

val torr : float

torr = 1.33322368421e3

val meter_of_mercury : float

meter_of_mercury = 1.33322368421e6

val inch_of_mercury : float

inch_of_mercury = 3.38638815789e4

val inch_of_water : float

inch_of_water = 2.490889e3

val psi : float

psi = 6.89475729317e4

val poise : float

poise = 1e0

val stokes : float

stokes = 1e0

val stilb : float

stilb = 1e0

val lumen : float

lumen = 1e0

val lux : float

lux = 1e-4

val phot : float

phot = 1e0

val footcandle : float

footcandle = 1.076e-3

val lambert : float

lambert = 1e0

val footlambert : float

footlambert = 1.07639104e-3

val curie : float

curie = 3.7e10

val roentgen : float

roentgen = 2.58e-8

val rad : float

rad = 1e2

val solar_mass : float

solar_mass = 1.98892e33

val bohr_radius : float

bohr_radius = 5.291772083e-9

val newton : float

newton = 1e5

val dyne : float

dyne = 1e0

val joule : float

joule = 1e7

val erg : float

erg = 1e0

val stefan_boltzmann_constant : float

stefan_boltzmann_constant = 5.67040047374e-5

val thomson_cross_section : float

thomson_cross_section = 6.65245893699e-25

val bohr_magneton : float

bohr_magneton = 9.27400899e-21

val nuclear_magneton : float

nuclear_magneton = 5.05078317e-24

val electron_magnetic_moment : float

electron_magnetic_moment = 9.28476362e-21

val proton_magnetic_moment : float

proton_magnetic_moment = 1.410606633e-23

val faraday : float

faraday = 9.64853429775e3

val electron_charge : float

electron_charge = 1.602176487e-20

\ No newline at end of file diff --git a/owl-base/Owl_const/MKS/index.html b/owl-base/Owl_const/MKS/index.html deleted file mode 100644 index 582899bb9..000000000 --- a/owl-base/Owl_const/MKS/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -MKS (owl-base.Owl_const.MKS)

Module Owl_const.MKS

val speed_of_light : float

speed_of_light = 2.99792458e8

val gravitational_constant : float

gravitational_constant = 6.673e-11

val plancks_constant_h : float

plancks_constant_h = 6.62606896e-34

val plancks_constant_hbar : float

plancks_constant_hbar = 1.05457162825e-34

val astronomical_unit : float

astronomical_unit = 1.49597870691e11

val light_year : float

light_year = 9.46053620707e15

val parsec : float

parsec = 3.08567758135e16

val grav_accel : float

grav_accel = 9.80665e0

val electron_volt : float

electron_volt = 1.602176487e-19

val mass_electron : float

mass_electron = 9.10938188e-31

val mass_muon : float

mass_muon = 1.88353109e-28

val mass_proton : float

mass_proton = 1.67262158e-27

val mass_neutron : float

mass_neutron = 1.67492716e-27

val rydberg : float

rydberg = 2.17987196968e-18

val boltzmann : float

boltzmann = 1.3806504e-23

val molar_gas : float

molar_gas = 8.314472e0

val standard_gas_volume : float

standard_gas_volume = 2.2710981e-2

val minute : float

minute = 6e1

val hour : float

hour = 3.6e3

val day : float

day = 8.64e4

val week : float

week = 6.048e5

val inch : float

inch = 2.54e-2

val foot : float

foot = 3.048e-1

val yard : float

yard = 9.144e-1

val mile : float

mile = 1.609344e3

val nautical_mile : float

nautical_mile = 1.852e3

val fathom : float

fathom = 1.8288e0

val mil : float

mil = 2.54e-5

val point : float

point = 3.52777777778e-4

val texpoint : float

texpoint = 3.51459803515e-4

val micron : float

micron = 1e-6

val angstrom : float

angstrom = 1e-10

val hectare : float

hectare = 1e4

val acre : float

acre = 4.04685642241e3

val barn : float

barn = 1e-28

val liter : float

liter = 1e-3

val us_gallon : float

us_gallon = 3.78541178402e-3

val quart : float

quart = 9.46352946004e-4

val pint : float

pint = 4.73176473002e-4

val cup : float

cup = 2.36588236501e-4

val fluid_ounce : float

fluid_ounce = 2.95735295626e-5

val tablespoon : float

tablespoon = 1.47867647813e-5

val teaspoon : float

teaspoon = 4.92892159375e-6

val canadian_gallon : float

canadian_gallon = 4.54609e-3

val uk_gallon : float

uk_gallon = 4.546092e-3

val miles_per_hour : float

miles_per_hour = 4.4704e-1

val kilometers_per_hour : float

kilometers_per_hour = 2.77777777778e-1

val knot : float

knot = 5.14444444444e-1

val pound_mass : float

pound_mass = 4.5359237e-1

val ounce_mass : float

ounce_mass = 2.8349523125e-2

val ton : float

ton = 9.0718474e2

val metric_ton : float

metric_ton = 1e3

val uk_ton : float

uk_ton = 1.0160469088e3

val troy_ounce : float

troy_ounce = 3.1103475e-2

val carat : float

carat = 2e-4

val unified_atomic_mass : float

unified_atomic_mass = 1.660538782e-27

val gram_force : float

gram_force = 9.80665e-3

val pound_force : float

pound_force = 4.44822161526e0

val kilopound_force : float

kilopound_force = 4.44822161526e3

val poundal : float

poundal = 1.38255e-1

val calorie : float

calorie = 4.1868e0

val btu : float

btu = 1.05505585262e3

val therm : float

therm = 1.05506e8

val horsepower : float

horsepower = 7.457e2

val bar : float

bar = 1e5

val std_atmosphere : float

std_atmosphere = 1.01325e5

val torr : float

torr = 1.33322368421e2

val meter_of_mercury : float

meter_of_mercury = 1.33322368421e5

val inch_of_mercury : float

inch_of_mercury = 3.38638815789e3

val inch_of_water : float

inch_of_water = 2.490889e2

val psi : float

psi = 6.89475729317e3

val poise : float

poise = 1e-1

val stokes : float

stokes = 1e-4

val stilb : float

stilb = 1e4

val lumen : float

lumen = 1e0

val lux : float

lux = 1e0

val phot : float

phot = 1e4

val footcandle : float

footcandle = 1.076e1

val lambert : float

lambert = 1e4

val footlambert : float

footlambert = 1.07639104e1

val curie : float

curie = 3.7e10

val roentgen : float

roentgen = 2.58e-4

val rad : float

rad = 1e-2

val solar_mass : float

solar_mass = 1.98892e30

val bohr_radius : float

bohr_radius = 5.291772083e-11

val newton : float

newton = 1e0

val dyne : float

dyne = 1e-5

val joule : float

joule = 1e0

val erg : float

erg = 1e-7

val stefan_boltzmann_constant : float

stefan_boltzmann_constant = 5.67040047374e-8

val thomson_cross_section : float

thomson_cross_section = 6.65245893699e-29

val bohr_magneton : float

bohr_magneton = 9.27400899e-24

val nuclear_magneton : float

nuclear_magneton = 5.05078317e-27

val electron_magnetic_moment : float

electron_magnetic_moment = 9.28476362e-24

val proton_magnetic_moment : float

proton_magnetic_moment = 1.410606633e-26

val faraday : float

faraday = 9.64853429775e4

val electron_charge : float

electron_charge = 1.602176487e-19

val vacuum_permittivity : float

vacuum_permittivity = 8.854187817e-12

val vacuum_permeability : float

vacuum_permeability = 1.25663706144e-6

val debye : float

debye = 3.33564095198e-30

val gauss : float

gauss = 1e-4
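
Each MKS value expresses one unit in SI base units (metres, kilograms, seconds), so conversions reduce to plain float arithmetic. A small illustrative sketch, with made-up quantities:

let metres_of_miles m = m *. Owl_const.MKS.mile   (* 1 mile = 1609.344 m *)
let pascals_of_psi p = p *. Owl_const.MKS.psi     (* 1 psi is about 6894.76 Pa *)
let () = Printf.printf "26.2 miles = %.1f m\n" (metres_of_miles 26.2)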

\ No newline at end of file diff --git a/owl-base/Owl_const/Prefix/index.html b/owl-base/Owl_const/Prefix/index.html deleted file mode 100644 index 6cc0f5893..000000000 --- a/owl-base/Owl_const/Prefix/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Prefix (owl-base.Owl_const.Prefix)

Module Owl_const.Prefix

val fine_structure : float

fine_structure = 7.297352533e-3

val avogadro : float

avogadro = 6.02214199e23

val yotta : float

yotta = 1e24

val zetta : float

zetta = 1e21

val exa : float

exa = 1e18

val peta : float

peta = 1e15

val tera : float

tera = 1e12

val giga : float

giga = 1e9

val mega : float

mega = 1e6

val kilo : float

kilo = 1e3

val hecto : float

hecto = 1e2

val deca : float

deca = 1e1

val deci : float

deci = 1e-1

val centi : float

centi = 1e-2

val milli : float

milli = 1e-3

val micro : float

micro = 1e-6

val nano : float

nano = 1e-9

val pico : float

pico = 1e-12

val femto : float

femto = 1e-15

val atto : float

atto = 1e-18

val zepto : float

zepto = 1e-21

val yocto : float

yocto = 1e-24
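
The prefixes are plain multipliers, so rescaling a quantity is ordinary float arithmetic. A minimal illustrative sketch, with an arbitrary input value:

let nanometres_of_metres m = m /. Owl_const.Prefix.nano
let () = Printf.printf "0.5 m = %g nm\n" (nanometres_of_metres 0.5)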

\ No newline at end of file diff --git a/owl-base/Owl_const/SI/index.html b/owl-base/Owl_const/SI/index.html deleted file mode 100644 index 1db233f47..000000000 --- a/owl-base/Owl_const/SI/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -SI (owl-base.Owl_const.SI)

Module Owl_const.SI

val speed_of_light : float

speed_of_light = 2.99792458e8

val gravitational_constant : float

gravitational_constant = 6.673e-11

val plancks_constant_h : float

plancks_constant_h = 6.62606896e-34

val plancks_constant_hbar : float

plancks_constant_hbar = 1.05457162825e-34

val astronomical_unit : float

astronomical_unit = 1.49597870691e11

val light_year : float

light_year = 9.46053620707e15

val parsec : float

parsec = 3.08567758135e16

val grav_accel : float

grav_accel = 9.80665e0

val electron_volt : float

electron_volt = 1.602176487e-19

val mass_electron : float

mass_electron = 9.10938188e-31

val mass_muon : float

mass_muon = 1.88353109e-28

val mass_proton : float

mass_proton = 1.67262158e-27

val mass_neutron : float

mass_neutron = 1.67492716e-27

val rydberg : float

rydberg = 2.17987196968e-18

val boltzmann : float

boltzmann = 1.3806504e-23

val molar_gas : float

molar_gas = 8.314472e0

val standard_gas_volume : float

standard_gas_volume = 2.2710981e-2

val minute : float

minute = 6e1

val hour : float

hour = 3.6e3

val day : float

day = 8.64e4

val week : float

week = 6.048e5

val inch : float

inch = 2.54e-2

val foot : float

foot = 3.048e-1

val yard : float

yard = 9.144e-1

val mile : float

mile = 1.609344e3

val nautical_mile : float

nautical_mile = 1.852e3

val fathom : float

fathom = 1.8288e0

val mil : float

mil = 2.54e-5

val point : float

point = 3.52777777778e-4

val texpoint : float

texpoint = 3.51459803515e-4

val micron : float

micron = 1e-6

val angstrom : float

angstrom = 1e-10

val hectare : float

hectare = 1e4

val acre : float

acre = 4.04685642241e3

val barn : float

barn = 1e-28

val liter : float

liter = 1e-3

val us_gallon : float

us_gallon = 3.78541178402e-3

val quart : float

quart = 9.46352946004e-4

val pint : float

pint = 4.73176473002e-4

val cup : float

cup = 2.36588236501e-4

val fluid_ounce : float

fluid_ounce = 2.95735295626e-5

val tablespoon : float

tablespoon = 1.47867647813e-5

val teaspoon : float

teaspoon = 4.92892159375e-6

val canadian_gallon : float

canadian_gallon = 4.54609e-3

val uk_gallon : float

uk_gallon = 4.546092e-3

val miles_per_hour : float

miles_per_hour = 4.4704e-1

val kilometers_per_hour : float

kilometers_per_hour = 2.77777777778e-1

val knot : float

knot = 5.14444444444e-1

val pound_mass : float

pound_mass = 4.5359237e-1

val ounce_mass : float

ounce_mass = 2.8349523125e-2

val ton : float

ton = 9.0718474e2

val metric_ton : float

metric_ton = 1e3

val uk_ton : float

uk_ton = 1.0160469088e3

val troy_ounce : float

troy_ounce = 3.1103475e-2

val carat : float

carat = 2e-4

val unified_atomic_mass : float

unified_atomic_mass = 1.660538782e-27

val gram_force : float

gram_force = 9.80665e-3

val pound_force : float

pound_force = 4.44822161526e0

val kilopound_force : float

kilopound_force = 4.44822161526e3

val poundal : float

poundal = 1.38255e-1

val calorie : float

calorie = 4.1868e0

val btu : float

btu = 1.05505585262e3

val therm : float

therm = 1.05506e8

val horsepower : float

horsepower = 7.457e2

val bar : float

bar = 1e5

val std_atmosphere : float

std_atmosphere = 1.01325e5

val torr : float

torr = 1.33322368421e2

val meter_of_mercury : float

meter_of_mercury = 1.33322368421e5

val inch_of_mercury : float

inch_of_mercury = 3.38638815789e3

val inch_of_water : float

inch_of_water = 2.490889e2

val psi : float

psi = 6.89475729317e3

val poise : float

poise = 1e-1

val stokes : float

stokes = 1e-4

val stilb : float

stilb = 1e4

val lumen : float

lumen = 1e0

val lux : float

lux = 1e0

val phot : float

phot = 1e4

val footcandle : float

footcandle = 1.076e1

val lambert : float

lambert = 1e4

val footlambert : float

footlambert = 1.07639104e1

val curie : float

curie = 3.7e10

val roentgen : float

roentgen = 2.58e-4

val rad : float

rad = 1e-2

val solar_mass : float

solar_mass = 1.98892e30

val bohr_radius : float

bohr_radius = 5.291772083e-11

val newton : float

newton = 1e0

val dyne : float

dyne = 1e-5

val joule : float

joule = 1e0

val erg : float

erg = 1e-7

val stefan_boltzmann_constant : float

stefan_boltzmann_constant = 5.67040047374e-8

val thomson_cross_section : float

thomson_cross_section = 6.65245893699e-29

val bohr_magneton : float

bohr_magneton = 9.27400899e-24

val nuclear_magneton : float

nuclear_magneton = 5.05078317e-27

val electron_magnetic_moment : float

electron_magnetic_moment = 9.28476362e-24

val proton_magnetic_moment : float

proton_magnetic_moment = 1.410606633e-26

val faraday : float

faraday = 9.64853429775e4

val electron_charge : float

electron_charge = 1.602176487e-19

val vacuum_permittivity : float

vacuum_permittivity = 8.854187817e-12

val vacuum_permeability : float

vacuum_permeability = 1.25663706144e-6

val debye : float

debye = 3.33564095198e-30

val gauss : float

gauss = 1e-4

\ No newline at end of file diff --git a/owl-base/Owl_countmin_sketch/.dummy b/owl-base/Owl_countmin_sketch/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_countmin_sketch/Make/argument-1-T/index.html b/owl-base/Owl_countmin_sketch/Make/argument-1-T/index.html deleted file mode 100644 index 436bf85b2..000000000 --- a/owl-base/Owl_countmin_sketch/Make/argument-1-T/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -T (owl-base.Owl_countmin_sketch.Make.T)

Parameter Make.T

Type definition
type t

The type of count-min tables

Core functions
val init : int -> int -> t

init l w generates a table with length l and width w, all counters initialized to 0.

val incr : int -> int -> t -> unit

incr i j t increments the counter at length index i and width index j in table t.

val get : int -> int -> t -> int

get i j t gets the value of the counter at length index i and width index j in table t.

val clone : t -> t

clone t returns a new table with the same contents as t.

val merge : t -> t -> t

merge t1 t2 merges tables t1 and t2 element-wise. If t1 and t2 have the same dimensions, returns a new table whose elements are the sums of corresponding elements from t1 and t2. If dimensions do not match, raises INVALID_ARGUMENT.

\ No newline at end of file diff --git a/owl-base/Owl_countmin_sketch/Make/index.html b/owl-base/Owl_countmin_sketch/Make/index.html deleted file mode 100644 index 60c7f1428..000000000 --- a/owl-base/Owl_countmin_sketch/Make/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Make (owl-base.Owl_countmin_sketch.Make)

Module Owl_countmin_sketch.Make

Parameters

Signature

Type definition
type 'a sketch

The type of Count-Min sketches

Core functions
val init : epsilon:float -> delta:float -> 'a sketch

init epsilon delta initializes a sketch with approximation ratio (1 + epsilon) and failure probability delta.

val incr : 'a sketch -> 'a -> unit

incr s x increments the frequency count of x in sketch s in-place.

val count : 'a sketch -> 'a -> int

count s x returns the estimated frequency of element x in s.

val init_from : 'a sketch -> 'a sketch

init_from s initializes a new empty sketch with the same parameters as s, which can later be merged with s.

val merge : 'a sketch -> 'a sketch -> 'a sketch

merge s1 s2 returns a new sketch whose counts are the sum of those in s1 and s2. Raises INVALID_ARGUMENT if the parameters of s1 and s2 do not match.
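
To obtain a concrete sketch module, Make is applied to a table implementation from Owl_countmin_table; this is essentially how the Native and Owl modules of this library are obtained. A minimal sketch:

module CM = Owl_countmin_sketch.Make (Owl_countmin_table.Native)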

\ No newline at end of file diff --git a/owl-base/Owl_countmin_sketch/Native/index.html b/owl-base/Owl_countmin_sketch/Native/index.html deleted file mode 100644 index ef7bbeabd..000000000 --- a/owl-base/Owl_countmin_sketch/Native/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Native (owl-base.Owl_countmin_sketch.Native)

Module Owl_countmin_sketch.Native

val init : epsilon:float -> delta:float -> 'a sketch
val incr : 'a sketch -> 'a -> unit
val count : 'a sketch -> 'a -> int
val init_from : 'a sketch -> 'a sketch
val merge : 'a sketch -> 'a sketch -> 'a sketch
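
A minimal usage sketch of the Native implementation; the stream of items counted here is made up for the example:

let () =
  let s = Owl_countmin_sketch.Native.init ~epsilon:0.001 ~delta:0.01 in
  List.iter (Owl_countmin_sketch.Native.incr s) [ "cat"; "dog"; "cat"; "cat"; "fish" ];
  (* the estimate never undercounts; it may overcount within the epsilon/delta bounds *)
  Printf.printf "estimated frequency of cat: %d\n" (Owl_countmin_sketch.Native.count s "cat")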
\ No newline at end of file diff --git a/owl-base/Owl_countmin_sketch/Owl/index.html b/owl-base/Owl_countmin_sketch/Owl/index.html deleted file mode 100644 index a1bc91a38..000000000 --- a/owl-base/Owl_countmin_sketch/Owl/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Owl (owl-base.Owl_countmin_sketch.Owl)

Module Owl_countmin_sketch.Owl

val init : epsilon:float -> delta:float -> 'a sketch
val incr : 'a sketch -> 'a -> unit
val count : 'a sketch -> 'a -> int
val init_from : 'a sketch -> 'a sketch
val merge : 'a sketch -> 'a sketch -> 'a sketch
\ No newline at end of file diff --git a/owl-base/Owl_countmin_sketch_sig/.dummy b/owl-base/Owl_countmin_sketch_sig/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_countmin_sketch_sig/module-type-Sig/index.html b/owl-base/Owl_countmin_sketch_sig/module-type-Sig/index.html deleted file mode 100644 index 5c48a6af3..000000000 --- a/owl-base/Owl_countmin_sketch_sig/module-type-Sig/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Sig (owl-base.Owl_countmin_sketch_sig.Sig)

Module type Owl_countmin_sketch_sig.Sig

Type definition
type 'a sketch

The type of Count-Min sketches

Core functions
val init : epsilon:float -> delta:float -> 'a sketch

init epsilon delta initializes a sketch with approximation ratio (1 + epsilon) and failure probability delta.

val incr : 'a sketch -> 'a -> unit

incr s x increments the frequency count of x in sketch s in-place.

val count : 'a sketch -> 'a -> int

count s x returns the estimated frequency of element x in s.

val init_from : 'a sketch -> 'a sketch

init_from s initializes a new empty sketch with the same parameters as s, which can later be merged with s.

val merge : 'a sketch -> 'a sketch -> 'a sketch

merge s1 s2 returns a new sketch whose counts are the sum of those in s1 and s2. Raises INVALID_ARGUMENT if the parameters of s1 and s2 do not match.

\ No newline at end of file diff --git a/owl-base/Owl_countmin_table/.dummy b/owl-base/Owl_countmin_table/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_countmin_table/Native/index.html b/owl-base/Owl_countmin_table/Native/index.html deleted file mode 100644 index 848dab3be..000000000 --- a/owl-base/Owl_countmin_table/Native/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Native (owl-base.Owl_countmin_table.Native)

Module Owl_countmin_table.Native

Type definition
type t

The type of count-min tables

Core functions
val init : int -> int -> t

init l w generates a table with length l and width w, all counters initialized to 0.

val incr : int -> int -> t -> unit

incr i j t increments the counter at length index i and width index j in table t.

val get : int -> int -> t -> int

get i j t gets the value of the counter at length index i and width index j in table t.

val clone : t -> t

clone t returns a new table with the same contents as t.

val merge : t -> t -> t

merge t1 t2 merges tables t1 and t2 element-wise. If t1 and t2 have the same dimensions, returns a new table whose elements are the sums of corresponding elements from t1 and t2. If dimensions do not match, raises INVALID_ARGUMENT.
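
A minimal usage sketch of this table interface, assuming the Native implementation documented here; the dimensions and indices are purely illustrative.

module T = Owl_countmin_table.Native

let () =
  (* a table with length 5 and width 100, all counters starting at 0 *)
  let t = T.init 5 100 in
  T.incr 2 17 t;
  T.incr 2 17 t;
  Printf.printf "counter (2, 17) = %d\n" (T.get 2 17 t);
  (* merging two tables of identical dimensions sums their counters *)
  let merged = T.merge t (T.clone t) in
  Printf.printf "after merge: %d\n" (T.get 2 17 merged)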

\ No newline at end of file diff --git a/owl-base/Owl_countmin_table/Owl/index.html b/owl-base/Owl_countmin_table/Owl/index.html deleted file mode 100644 index 8b700f3df..000000000 --- a/owl-base/Owl_countmin_table/Owl/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Owl (owl-base.Owl_countmin_table.Owl)

Module Owl_countmin_table.Owl

Type definition
type t

The type of count-min tables

Core functions
val init : int -> int -> t

init l w generates a table with length l and width w, all counters initialized to 0.

val incr : int -> int -> t -> unit

incr i j t increments the counter at length index i and width index j in table t.

val get : int -> int -> t -> int

get i j t gets the value of the counter at length index i and width index j in table t.

val clone : t -> t

clone t returns a new table with the same contents as t.

val merge : t -> t -> t

merge t1 t2 merges tables t1 and t2 element-wise. If t1 and t2 have the same dimensions, returns a new table whose elements are the sums of corresponding elements from t1 and t2. If dimensions do not match, raises INVALID_ARGUMENT.

\ No newline at end of file diff --git a/owl-base/Owl_countmin_table/module-type-Sig/index.html b/owl-base/Owl_countmin_table/module-type-Sig/index.html deleted file mode 100644 index fe6693f47..000000000 --- a/owl-base/Owl_countmin_table/module-type-Sig/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Sig (owl-base.Owl_countmin_table.Sig)

Module type Owl_countmin_table.Sig

Type definition
type t

The type of count-min tables

Core functions
val init : int -> int -> t

init l w generates a table with length l and width w, all counters initialized to 0.

val incr : int -> int -> t -> unit

incr i j t increments the counter at length index i and width index j in table t.

val get : int -> int -> t -> int

get i j t gets the value of the counter at length index i and width index j in table t.

val clone : t -> t

clone t returns a new table with the same contents as t.

val merge : t -> t -> t

merge t1 t2 merges tables t1 and t2 element-wise. If t1 and t2 have the same dimensions, returns a new table whose elements are the sums of corresponding elements from t1 and t2. If dimensions do not match, raises INVALID_ARGUMENT.

\ No newline at end of file diff --git a/owl-base/Owl_dataframe/.dummy b/owl-base/Owl_dataframe/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_exception/.dummy b/owl-base/Owl_exception/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_graph/.dummy b/owl-base/Owl_graph/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_heavyhitters_sketch/.dummy b/owl-base/Owl_heavyhitters_sketch/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_heavyhitters_sketch/Make/argument-1-CM/index.html b/owl-base/Owl_heavyhitters_sketch/Make/argument-1-CM/index.html deleted file mode 100644 index 0d075f1cc..000000000 --- a/owl-base/Owl_heavyhitters_sketch/Make/argument-1-CM/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -CM (owl-base.Owl_heavyhitters_sketch.Make.CM)

Parameter Make.CM

Type definition
type 'a sketch

The type of Count-Min sketches

Core functions
val init : epsilon:float -> delta:float -> 'a sketch

init epsilon delta initializes a sketch with approximation ratio (1 + epsilon) and failure probability delta.

val incr : 'a sketch -> 'a -> unit

incr s x increments the frequency count of x in sketch s in-place.

val count : 'a sketch -> 'a -> int

count s x returns the estimated frequency of element x in s.

val init_from : 'a sketch -> 'a sketch

init_from s initializes a new, empty sketch with the same parameters as s, so that the two can later be merged.

val merge : 'a sketch -> 'a sketch -> 'a sketch

merge s1 s2 returns a new sketch whose counts are the sum of those in s1 and s2. Raises INVALID_ARGUMENT if the parameters of s1 and s2 do not match.

\ No newline at end of file diff --git a/owl-base/Owl_heavyhitters_sketch/Make/index.html b/owl-base/Owl_heavyhitters_sketch/Make/index.html deleted file mode 100644 index 6b7a59397..000000000 --- a/owl-base/Owl_heavyhitters_sketch/Make/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Make (owl-base.Owl_heavyhitters_sketch.Make)

Module Owl_heavyhitters_sketch.Make

Parameters

Signature

Type definition
type 'a t

The type of heavy-hitters sketches

Core functions
val init : k:float -> epsilon:float -> delta:float -> 'a t

`init k epsilon delta` initializes a sketch with threshold k, approximation factor epsilon, and failure probability delta.

val add : 'a t -> 'a -> unit

`add h x` adds value `x` to sketch `h` in-place.

val get : 'a t -> ('a * int) list

`get h` returns a list of all heavy hitters in sketch `h`, as (value, frequency) pairs, sorted in decreasing order of frequency.
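
A minimal usage sketch, assuming the Owl_heavyhitters_sketch.Native instantiation documented below; the values of k, epsilon and delta and the input stream are purely illustrative.

module HH = Owl_heavyhitters_sketch.Native

let () =
  (* a heavy-hitters sketch with threshold k, backed by a count-min sketch
     with parameters epsilon and delta *)
  let h = HH.init ~k:10. ~epsilon:0.001 ~delta:0.01 in
  List.iter (HH.add h) [ "x"; "x"; "y"; "x"; "x" ];
  (* print the (value, frequency) pairs in decreasing order of frequency *)
  List.iter (fun (v, c) -> Printf.printf "%s: %d\n" v c) (HH.get h)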

\ No newline at end of file diff --git a/owl-base/Owl_heavyhitters_sketch/Native/index.html b/owl-base/Owl_heavyhitters_sketch/Native/index.html deleted file mode 100644 index 8098c30c8..000000000 --- a/owl-base/Owl_heavyhitters_sketch/Native/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Native (owl-base.Owl_heavyhitters_sketch.Native)

Module Owl_heavyhitters_sketch.Native

val init : k:float -> epsilon:float -> delta:float -> 'a t
val add : 'a t -> 'a -> unit
val get : 'a t -> ('a * int) list
\ No newline at end of file diff --git a/owl-base/Owl_heavyhitters_sketch/Owl/index.html b/owl-base/Owl_heavyhitters_sketch/Owl/index.html deleted file mode 100644 index a9b40662e..000000000 --- a/owl-base/Owl_heavyhitters_sketch/Owl/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Owl (owl-base.Owl_heavyhitters_sketch.Owl)

Module Owl_heavyhitters_sketch.Owl

val init : k:float -> epsilon:float -> delta:float -> 'a t
val add : 'a t -> 'a -> unit
val get : 'a t -> ('a * int) list
\ No newline at end of file diff --git a/owl-base/Owl_heavyhitters_sketch_sig/.dummy b/owl-base/Owl_heavyhitters_sketch_sig/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_heavyhitters_sketch_sig/module-type-Sig/index.html b/owl-base/Owl_heavyhitters_sketch_sig/module-type-Sig/index.html deleted file mode 100644 index 38748379d..000000000 --- a/owl-base/Owl_heavyhitters_sketch_sig/module-type-Sig/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Sig (owl-base.Owl_heavyhitters_sketch_sig.Sig)

Module type Owl_heavyhitters_sketch_sig.Sig

Type definition
type 'a t

The type of heavy-hitters sketches

Core functions
val init : k:float -> epsilon:float -> delta:float -> 'a t

`init k epsilon delta` initializes a sketch with threshold k, approximation factor epsilon, and failure probability delta.

val add : 'a t -> 'a -> unit

`add h x` adds value `x` to sketch `h` in-place.

val get : 'a t -> ('a * int) list

`get h` returns a list of all heavy hitters in sketch `h`, as (value, frequency) pairs, sorted in decreasing order of frequency.

\ No newline at end of file diff --git a/owl-base/Owl_io/.dummy b/owl-base/Owl_io/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_lazy/.dummy b/owl-base/Owl_lazy/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_lazy/Make/argument-1-A/Linalg/index.html b/owl-base/Owl_lazy/Make/argument-1-A/Linalg/index.html deleted file mode 100644 index 761491989..000000000 --- a/owl-base/Owl_lazy/Make/argument-1-A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_lazy.Make.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_lazy/Make/argument-1-A/Mat/index.html b/owl-base/Owl_lazy/Make/argument-1-A/Mat/index.html deleted file mode 100644 index 9e33addb4..000000000 --- a/owl-base/Owl_lazy/Make/argument-1-A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_lazy.Make.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_lazy/Make/argument-1-A/Scalar/index.html b/owl-base/Owl_lazy/Make/argument-1-A/Scalar/index.html deleted file mode 100644 index 178582cab..000000000 --- a/owl-base/Owl_lazy/Make/argument-1-A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_lazy.Make.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_lazy/Make/argument-1-A/index.html b/owl-base/Owl_lazy/Make/argument-1-A/index.html deleted file mode 100644 index e5ec0b434..000000000 --- a/owl-base/Owl_lazy/Make/argument-1-A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_lazy.Make.A)

Parameter Make.A

include Owl_types_ndarray_mutable.Sig
include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file diff --git a/owl-base/Owl_lazy/Make/index.html b/owl-base/Owl_lazy/Make/index.html deleted file mode 100644 index e8a6c26c2..000000000 --- a/owl-base/Owl_lazy/Make/index.html +++ /dev/null @@ -1,87 +0,0 @@ - -Make (owl-base.Owl_lazy.Make)

Module Owl_lazy.Make

Parameters

Signature

Type definition
type arr

TODO

type elt

TODO

type value

TODO

type attr

TODO

type graph

TODO

Type conversion functions
val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val node_to_arr : attr Owl_graph.node -> arr

TODO

val arr_to_node : arr -> attr Owl_graph.node

TODO

val node_to_elt : attr Owl_graph.node -> elt

TODO

val elt_to_node : elt -> attr Owl_graph.node

TODO

val pack_arr : A.arr -> arr

TODO

val unpack_arr : arr -> A.arr

TODO

val pack_elt : A.elt -> elt

TODO

val unpack_elt : elt -> A.elt

TODO

val float_to_elt : float -> elt

TODO

val elt_to_float : elt -> float

TODO

Utility functions
val graph_to_dot : graph -> string

TODO

val graph_to_trace : graph -> string

TODO

Create variables
val var_arr : ?shape:int array -> string -> arr

TODO

val var_elt : string -> elt

TODO

val const_arr : string -> A.arr -> arr

TODO

val const_elt : string -> A.elt -> elt

TODO

val assign_arr : arr -> A.arr -> unit

TODO

val assign_elt : elt -> A.elt -> unit

TODO

val unsafe_assign_arr : arr -> A.arr -> unit

TODO

Maths functions
val noop : arr -> arr

TODO

val empty : int array -> arr

TODO

val zeros : int array -> arr

TODO

val ones : int array -> arr

TODO

val create : int array -> elt -> arr

TODO

val sequential : ?a:elt -> ?step:elt -> int array -> arr

TODO

val uniform : ?a:elt -> ?b:elt -> int array -> arr

TODO

val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr

TODO

val bernoulli : ?p:elt -> int array -> arr

TODO

val init : int array -> (int -> elt) -> arr

TODO

val shape : arr -> int array

TODO

val numel : arr -> int

TODO

val get : arr -> int array -> elt

TODO

val set : arr -> int array -> elt -> unit

TODO

val get_slice : int list list -> arr -> arr

TODO

val set_slice : int list list -> arr -> arr -> unit

TODO

val copy : arr -> arr

TODO

val reset : arr -> unit

TODO

val reshape : arr -> int array -> arr

TODO

val reverse : arr -> arr

TODO

val tile : arr -> int array -> arr

TODO

val repeat : arr -> int array -> arr

TODO

val concatenate : ?axis:int -> arr array -> arr

TODO

val split : ?axis:int -> int array -> arr -> arr array

TODO

val draw : ?axis:int -> arr -> int -> arr * 'a array

TODO

val map : (elt -> elt) -> arr -> arr

TODO

val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr

TODO

val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr

TODO

val one_hot : int -> arr -> arr

TODO

val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(A.elt -> string) -> arr -> arr

TODO

val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit

TODO

val abs : arr -> arr

TODO

val neg : arr -> arr

TODO

val floor : arr -> arr

TODO

val ceil : arr -> arr

TODO

val round : arr -> arr

TODO

val sqr : arr -> arr

TODO

val sqrt : arr -> arr

TODO

val log : arr -> arr

TODO

val log2 : arr -> arr

TODO

val log10 : arr -> arr

TODO

val exp : arr -> arr

TODO

val sin : arr -> arr

TODO

val cos : arr -> arr

TODO

val tan : arr -> arr

TODO

val sinh : arr -> arr

TODO

val cosh : arr -> arr

TODO

val tanh : arr -> arr

TODO

val asin : arr -> arr

TODO

val acos : arr -> arr

TODO

val atan : arr -> arr

TODO

val asinh : arr -> arr

TODO

val acosh : arr -> arr

TODO

val atanh : arr -> arr

TODO

val min : ?axis:int -> ?keep_dims:bool -> arr -> arr

TODO

val max : ?axis:int -> ?keep_dims:bool -> arr -> arr

TODO

val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr

TODO

val sum_reduce : ?axis:int array -> arr -> arr

TODO

val signum : arr -> arr

TODO

val sigmoid : arr -> arr

TODO

val relu : arr -> arr

TODO

val min' : arr -> elt

TODO

val max' : arr -> elt

TODO

val sum' : arr -> elt

TODO

val l1norm' : arr -> elt

TODO

val l2norm' : arr -> elt

TODO

val l2norm_sqr' : arr -> elt

TODO

val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr

TODO

val clip_by_l2norm : elt -> arr -> arr

TODO

val pow : arr -> arr -> arr

TODO

val scalar_pow : elt -> arr -> arr

TODO

val pow_scalar : arr -> elt -> arr

TODO

val atan2 : arr -> arr -> arr

TODO

val scalar_atan2 : elt -> arr -> arr

TODO

val atan2_scalar : arr -> elt -> arr

TODO

val hypot : arr -> arr -> arr

TODO

val min2 : arr -> arr -> arr

TODO

val max2 : arr -> arr -> arr

TODO

val add : arr -> arr -> arr

TODO

val sub : arr -> arr -> arr

TODO

val mul : arr -> arr -> arr

TODO

val div : arr -> arr -> arr

TODO

val add_scalar : arr -> elt -> arr

TODO

val sub_scalar : arr -> elt -> arr

TODO

val mul_scalar : arr -> elt -> arr

TODO

val div_scalar : arr -> elt -> arr

TODO

val scalar_add : elt -> arr -> arr

TODO

val scalar_sub : elt -> arr -> arr

TODO

val scalar_mul : elt -> arr -> arr

TODO

val scalar_div : elt -> arr -> arr

TODO

val fma : arr -> arr -> arr -> arr

TODO

val elt_equal : arr -> arr -> arr

TODO

val elt_not_equal : arr -> arr -> arr

TODO

val elt_less : arr -> arr -> arr

TODO

val elt_greater : arr -> arr -> arr

TODO

val elt_less_equal : arr -> arr -> arr

TODO

val elt_greater_equal : arr -> arr -> arr

TODO

val elt_equal_scalar : arr -> elt -> arr

TODO

val elt_not_equal_scalar : arr -> elt -> arr

TODO

val elt_less_scalar : arr -> elt -> arr

TODO

val elt_greater_scalar : arr -> elt -> arr

TODO

val elt_less_equal_scalar : arr -> elt -> arr

TODO

val elt_greater_equal_scalar : arr -> elt -> arr

TODO

val conv1d : ?padding:Owl_types.padding -> arr -> arr -> int array -> arr

TODO

val conv2d : ?padding:Owl_types.padding -> arr -> arr -> int array -> arr

TODO

val conv3d : ?padding:Owl_types.padding -> arr -> arr -> int array -> arr

TODO

val transpose_conv2d : ?padding:Owl_types.padding -> arr -> arr -> int array -> arr

TODO

val max_pool1d : ?padding:Owl_types.padding -> arr -> int array -> int array -> arr

TODO

val max_pool2d : ?padding:Owl_types.padding -> arr -> int array -> int array -> arr

TODO

val max_pool3d : ?padding:Owl_types.padding -> arr -> int array -> int array -> arr

TODO

val avg_pool1d : ?padding:Owl_types.padding -> arr -> int array -> int array -> arr

TODO

val avg_pool2d : ?padding:Owl_types.padding -> arr -> int array -> int array -> arr

TODO

val avg_pool3d : ?padding:Owl_types.padding -> arr -> int array -> int array -> arr

TODO

val conv1d_backward_input : arr -> arr -> int array -> arr -> arr

TODO

val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr

TODO

val conv2d_backward_input : arr -> arr -> int array -> arr -> arr

TODO

val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr

TODO

val conv3d_backward_input : arr -> arr -> int array -> arr -> arr

TODO

val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr

TODO

val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr

TODO

val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr

TODO

val max_pool1d_backward : Owl_types.padding -> arr -> int array -> int array -> arr -> arr

TODO

val max_pool2d_backward : Owl_types.padding -> arr -> int array -> int array -> arr -> arr

TODO

val max_pool3d_backward : Owl_types.padding -> arr -> int array -> int array -> arr -> arr

TODO

val avg_pool1d_backward : Owl_types.padding -> arr -> int array -> int array -> arr -> arr

TODO

val avg_pool2d_backward : Owl_types.padding -> arr -> int array -> int array -> arr -> arr

TODO

val avg_pool3d_backward : Owl_types.padding -> arr -> int array -> int array -> arr -> arr

TODO

val row_num : arr -> int

TODO

val col_num : arr -> int

TODO

val row : arr -> 'a -> arr

TODO

val rows : arr -> int array -> arr

TODO

val copy_row_to : arr -> 'a -> 'b -> unit

TODO

val copy_col_to : arr -> 'a -> 'b -> unit

TODO

val trace : arr -> elt

TODO

val dot : arr -> arr -> arr

TODO

val transpose : ?axis:int array -> arr -> arr

TODO

val to_rows : arr -> 'a array

TODO

val of_rows : arr array -> arr

TODO

val to_cols : arr -> 'a array

TODO

val of_cols : arr array -> arr

TODO

val of_array : elt array -> int array -> arr

TODO

val of_arrays : elt array array -> arr

TODO

Evaluation functions
val make_graph : input:attr Owl_graph.node array -> output:attr Owl_graph.node array -> string -> graph

TODO

val get_inputs : graph -> attr Owl_graph.node array

TODO

val get_outputs : graph -> attr Owl_graph.node array

TODO

val make_iopair : graph -> attr Owl_graph.node array -> attr Owl_graph.node array -> unit

TODO

val update_iopair : graph -> unit

TODO

val init_inputs : (attr Owl_graph.node -> value) -> graph -> unit

TODO

val optimise : graph -> unit

TODO

val eval_elt : elt array -> unit

TODO

val eval_arr : arr array -> unit

TODO

val eval_graph : graph -> unit

TODO
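
A minimal end-to-end sketch of the lazy-evaluation workflow. It is written as a functor over an arbitrary module A satisfying Owl_types_ndarray_mutable.Sig, since owl-base itself ships no concrete ndarray instance (a dense ndarray module from the full Owl package would do); the Demo name, the shapes, and the node names are illustrative assumptions.

module Demo (A : Owl_types_ndarray_mutable.Sig) = struct
  module L = Owl_lazy.Make (A)

  (* build a small delayed expression, bind an input, then force evaluation *)
  let run () =
    let x = L.var_arr ~shape:[| 3; 3 |] "x" in      (* symbolic input node *)
    let y = L.const_arr "y" (A.ones [| 3; 3 |]) in  (* constant node *)
    let z = L.add (L.sin x) y in                    (* nothing computed yet *)
    L.assign_arr x (A.sequential [| 3; 3 |]);       (* bind a concrete value *)
    L.eval_arr [| z |];                             (* evaluate the lazy graph *)
    L.unpack_arr z                                  (* extract the result as an A.arr *)
end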

\ No newline at end of file diff --git a/owl-base/Owl_log/.dummy b/owl-base/Owl_log/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_maths_interpolate/.dummy b/owl-base/Owl_maths_interpolate/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_maths_quadrature/.dummy b/owl-base/Owl_maths_quadrature/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_maths_root/.dummy b/owl-base/Owl_maths_root/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_neural_compiler/.dummy b/owl-base/Owl_neural_compiler/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Linalg/index.html b/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Linalg/index.html deleted file mode 100644 index 46c580654..000000000 --- a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Linalg/index.html +++ /dev/null @@ -1,33 +0,0 @@ - -Linalg (owl-base.Owl_neural_compiler.Make.Engine.Graph.Optimiser.Operator.Linalg)

Module Operator.Linalg

\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Mat/index.html b/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Mat/index.html deleted file mode 100644 index 1b83df28f..000000000 --- a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_compiler.Make.Engine.Graph.Optimiser.Operator.Mat)

Module Operator.Mat

\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Scalar/index.html b/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Scalar/index.html deleted file mode 100644 index a9d93754d..000000000 --- a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Scalar/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -Scalar (owl-base.Owl_neural_compiler.Make.Engine.Graph.Optimiser.Operator.Scalar)

Module Operator.Scalar

\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html deleted file mode 100644 index a77ade316..000000000 --- a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_neural_compiler.Make.Engine.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html deleted file mode 100644 index 8ea142e34..000000000 --- a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_compiler.Make.Engine.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html deleted file mode 100644 index ab48aed61..000000000 --- a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_neural_compiler.Make.Engine.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html b/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html deleted file mode 100644 index 134103e56..000000000 --- a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_neural_compiler.Make.Engine.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A)

Module Device.A

val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
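All of the trailing-underscore functions above follow the same in-place convention: the result is written into the ?out buffer when one is given, otherwise into the first array argument. The following OCaml sketch is not part of the generated page; the functor argument N and the helper accumulate_and_shift are illustrative assumptions showing how a module satisfying this signature could be used without allocating intermediate arrays.

(* Minimal sketch: N is any module implementing the mutable signature above. *)
module In_place_sketch (N : sig
  type arr
  type elt
  val add_ : ?out:arr -> arr -> arr -> unit
  val sub_scalar_ : ?out:arr -> arr -> elt -> unit
end) = struct
  (* Accumulate b into a, then shift a by c, reusing a as the output buffer
     both times so no temporaries are created. *)
  let accumulate_and_shift a b c =
    N.add_ ~out:a a b;
    N.sub_scalar_ ~out:a a c
end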
\ No newline at end of file
diff --git a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html b/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html
deleted file mode 100644
index 742436c6a..000000000
--- a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html
+++ /dev/null
@@ -1,2 +0,0 @@

Device (owl-base.Owl_neural_compiler.Make.Engine.Graph.Optimiser.Operator.Symbol.Shape.Type.Device)

Module Type.Device

module A : sig ... end
val make_device : unit -> device
val arr_to_value : A.arr -> value
val value_to_arr : value -> A.arr
val elt_to_value : A.elt -> value
val value_to_elt : value -> A.elt
val value_to_float : value -> float
val is_arr : value -> bool
val is_elt : value -> bool
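The Device module converts between raw ndarrays/scalars and the engine's boxed value type. The sketch below is not from the original page; the functor argument D and the roundtrip helper are assumptions illustrating the intended conversion pattern.

module Device_sketch (D : sig
  type device
  type value
  module A : sig type arr type elt end
  val make_device : unit -> device
  val arr_to_value : A.arr -> value
  val value_to_arr : value -> A.arr
  val is_arr : value -> bool
end) = struct
  (* Wrap a raw ndarray into a device value and recover it, guarding on is_arr. *)
  let roundtrip (x : D.A.arr) : D.A.arr =
    let _device = D.make_device () in
    let v = D.arr_to_value x in
    if D.is_arr v then D.value_to_arr v else x
end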
\ No newline at end of file
diff --git a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html b/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html
deleted file mode 100644
index 15ce71cf9..000000000
--- a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html
+++ /dev/null
@@ -1,5 +0,0 @@

Type (owl-base.Owl_neural_compiler.Make.Engine.Graph.Optimiser.Operator.Symbol.Shape.Type)

Module Shape.Type

module Device : sig ... end
and block = E.Graph.Optimiser.Operator.Symbol.Shape.Type.block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}
and attr = E.Graph.Optimiser.Operator.Symbol.Shape.Type.attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}
and op = E.Graph.Optimiser.Operator.Symbol.Shape.Type.op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
\ No newline at end of file
diff --git a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/index.html b/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/index.html
deleted file mode 100644
index 714981d7c..000000000
--- a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/Shape/index.html
+++ /dev/null
@@ -1,5 +0,0 @@

Shape (owl-base.Owl_neural_compiler.Make.Engine.Graph.Optimiser.Operator.Symbol.Shape)

Module Symbol.Shape

module Type : sig ... end
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array
\ No newline at end of file
diff --git a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/index.html b/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/index.html
deleted file mode 100644
index b82e88432..000000000
--- a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/Symbol/index.html
+++ /dev/null
@@ -1,28 +0,0 @@

Symbol (owl-base.Owl_neural_compiler.Make.Engine.Graph.Optimiser.Operator.Symbol)

Module Operator.Symbol

module Shape : sig ... end
val op_to_str : Shape.Type.op -> string
val is_random_variable : Shape.Type.op -> bool
val refnum : 'a Owl_graph.node -> int
val node_shape : Shape.Type.attr Owl_graph.node -> int array
val node_numel : Shape.Type.attr Owl_graph.node -> int
val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool
val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit
val shape_to_str : int array option array -> string
val node_to_str : Shape.Type.attr Owl_graph.node -> string
val node_to_arr : Shape.Type.t -> Shape.Type.arr
val arr_to_node : Shape.Type.arr -> Shape.Type.t
val node_to_elt : Shape.Type.t -> Shape.Type.elt
val elt_to_node : Shape.Type.elt -> Shape.Type.t
val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node
val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node
val var_arr : ?shape:int array -> string -> Shape.Type.arr
val var_elt : string -> Shape.Type.elt
val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr
val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt
val new_block_id : unit -> int
val make_empty_block : ?block_id:int -> int -> Shape.Type.block
val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit
val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit
val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option
val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit
val get_block_id : Shape.Type.attr Owl_graph.node -> int
val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit
val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit
val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit
val get_reuse : Shape.Type.attr Owl_graph.node -> bool
val is_shared : Shape.Type.attr Owl_graph.node -> bool
val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array
val is_var : Shape.Type.attr Owl_graph.node -> bool
val is_const : Shape.Type.attr Owl_graph.node -> bool
val is_node_arr : Shape.Type.attr Owl_graph.node -> bool
val is_node_elt : Shape.Type.attr Owl_graph.node -> bool
val is_assigned : Shape.Type.attr Owl_graph.node -> bool
val check_assigned : Shape.Type.attr Owl_graph.node -> unit
val is_valid : Shape.Type.attr Owl_graph.node -> bool
val validate : Shape.Type.attr Owl_graph.node -> unit
val invalidate : Shape.Type.attr Owl_graph.node -> unit
val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit
val is_freeze : Shape.Type.attr Owl_graph.node -> bool
val freeze : Shape.Type.attr Owl_graph.node -> unit
val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit
val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit
val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit
val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit
val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit
val float_to_elt : float -> Shape.Type.elt
val elt_to_float : Shape.Type.elt -> float
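The Symbol module is where symbolic placeholders and constants are created and later bound to concrete data. The sketch below is not from the original page; the functor argument S (with its stand-in input type) and the make_input helper are illustrative assumptions about how var_arr, assign_arr and float_to_elt fit together.

module Symbol_sketch (S : sig
  type arr
  type elt
  type input          (* stands in for Shape.Type.Device.A.arr *)
  val var_arr : ?shape:int array -> string -> arr
  val assign_arr : arr -> input -> unit
  val float_to_elt : float -> elt
end) = struct
  (* Declare a named placeholder with a fixed shape, bind a concrete ndarray
     to it, and build a scalar constant alongside it. *)
  let make_input (data : S.input) =
    let x = S.var_arr ~shape:[|28; 28|] "x" in
    S.assign_arr x data;
    (x, S.float_to_elt 0.5)
end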
\ No newline at end of file
diff --git a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/index.html b/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/index.html
deleted file mode 100644
index 920d9c011..000000000
--- a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/Operator/index.html
+++ /dev/null
@@ -1,420 +0,0 @@

Operator (owl-base.Owl_neural_compiler.Make.Engine.Graph.Optimiser.Operator)

Module Optimiser.Operator

module Symbol : sig ... end
val empty : int array -> Symbol.Shape.Type.arr
val zeros : int array -> Symbol.Shape.Type.arr
val ones : int array -> Symbol.Shape.Type.arr
val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val sequential : ?a:Symbol.Shape.Type.elt -> ?step:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val uniform : ?a:Symbol.Shape.Type.elt -> ?b:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val gaussian : ?mu:Symbol.Shape.Type.elt -> ?sigma:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr
val init_nd : int array -> (int array -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr
val shape : Symbol.Shape.Type.arr -> int array
val numel : Symbol.Shape.Type.arr -> int
val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit
val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val set_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit
val set_fancy : Owl_types.index list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit
val copy_ : out:'a -> 'b -> 'c
val reset : Symbol.Shape.Type.arr -> unit
val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val pad : ?v:Symbol.Shape.Type.elt -> int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr
val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val concatenate : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr
val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr
val split : ?axis:int -> 'a -> 'b -> 'c
val draw : ?axis:int -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr * 'a array
val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit
val min : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val sum : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val sum_reduce : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val elt_less_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val elt_greater_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val dilated_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val dilated_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val dilated_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val avg_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val avg_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val avg_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val upsampling2d_backward : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val row_num : Symbol.Shape.Type.arr -> int
val col_num : Symbol.Shape.Type.arr -> int
val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit
val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit
val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val to_rows : Symbol.Shape.Type.arr -> 'a array
val to_cols : Symbol.Shape.Type.arr -> 'a array
val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr
val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr
val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
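The Operator module exposes the lazy counterparts of the eager ndarray API: each call builds a graph node rather than computing values. The sketch below is not from the original page; the functor argument Op and the row_sum_shape helper are illustrative assumptions.

module Operator_sketch (Op : sig
  type arr
  type elt
  val uniform : ?a:elt -> ?b:elt -> int array -> arr
  val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
  val shape : arr -> int array
end) = struct
  (* Build a small lazy expression; no numerical work happens here, only graph
     nodes are created, yet the shape of the result is already known. *)
  let row_sum_shape () =
    let x = Op.uniform [|4; 5|] in
    let s = Op.sum ~axis:1 x in
    Op.shape s
end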
\ No newline at end of file
diff --git a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/index.html b/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/index.html
deleted file mode 100644
index 7f20b7899..000000000
--- a/owl-base/Owl_neural_compiler/Make/Engine/Graph/Optimiser/index.html
+++ /dev/null
@@ -1,4 +0,0 @@

Optimiser (owl-base.Owl_neural_compiler.Make.Engine.Graph.Optimiser)

Module Graph.Optimiser

module Operator : sig ... end
val estimate_complexity : 'a Owl_graph.node array -> int * int
val optimise_nodes : Operator.Symbol.Shape.Type.attr Owl_graph.node array -> unit
\ No newline at end of file
diff --git a/owl-base/Owl_neural_compiler/Make/Engine/Graph/index.html b/owl-base/Owl_neural_compiler/Make/Engine/Graph/index.html
deleted file mode 100644
index eaa02c03f..000000000
--- a/owl-base/Owl_neural_compiler/Make/Engine/Graph/index.html
+++ /dev/null
@@ -1,33 +0,0 @@

Graph (owl-base.Owl_neural_compiler.Make.Engine.Graph)

Module Engine.Graph

module Optimiser : sig ... end
type graph = E.Graph.graph
val shape_or_value : Optimiser.Operator.Symbol.Shape.Type.t -> string
val graph_to_dot : graph -> string
val graph_to_trace : graph -> string
val save_graph : 'a -> string -> unit
val load_graph : string -> 'a * 'b
val invalidate_rvs : graph -> unit
val is_iopair_safe : 'a Owl_graph.node -> 'a Owl_graph.node -> bool
val update_iopair : graph -> unit
val remove_unused_iopair : 'a Owl_graph.node array -> 'b array -> 'a Owl_graph.node array * 'b array
val optimise : graph -> unit
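A common workflow with this Graph module is to optimise a graph in place and then export it for visual inspection. The sketch below is not part of the original page; the functor argument G and the optimise_and_dump helper are illustrative assumptions.

module Graph_sketch (G : sig
  type graph
  val optimise : graph -> unit
  val graph_to_dot : graph -> string
end) = struct
  (* Run the optimiser on a computation graph in place, then render it in
     Graphviz dot format for inspection. *)
  let optimise_and_dump (g : G.graph) =
    G.optimise g;
    G.graph_to_dot g
end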
\ No newline at end of file
diff --git a/owl-base/Owl_neural_compiler/Make/Engine/index.html b/owl-base/Owl_neural_compiler/Make/Engine/index.html
deleted file mode 100644
index 1840cd461..000000000
--- a/owl-base/Owl_neural_compiler/Make/Engine/index.html
+++ /dev/null
@@ -1,485 +0,0 @@

Engine (owl-base.Owl_neural_compiler.Make.Engine)

Module Make.Engine

module Graph : sig ... end
val eval_graph : Graph.graph -> unit
module Optimiser = Graph.Optimiser
type graph = E.Graph.graph
val shape_or_value : Optimiser.Operator.Symbol.Shape.Type.t -> string
val graph_to_dot : graph -> string
val graph_to_trace : graph -> string
val save_graph : 'a -> string -> unit
val load_graph : string -> 'a * 'b
val invalidate_rvs : graph -> unit
val is_iopair_safe : 'a Owl_graph.node -> 'a Owl_graph.node -> bool
val update_iopair : graph -> unit
val remove_unused_iopair : 'a Owl_graph.node array -> 'b array -> 'a Owl_graph.node array * 'b array
val optimise : graph -> unit
module Operator = Graph.Optimiser.Operator
val estimate_complexity : 'a Owl_graph.node array -> int * int
val optimise_nodes : Operator.Symbol.Shape.Type.attr Owl_graph.node array -> unit
val empty : int array -> Symbol.Shape.Type.arr
val zeros : int array -> Symbol.Shape.Type.arr
val ones : int array -> Symbol.Shape.Type.arr
val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val sequential : ?a:Symbol.Shape.Type.elt -> ?step:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val uniform : ?a:Symbol.Shape.Type.elt -> ?b:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val gaussian : ?mu:Symbol.Shape.Type.elt -> ?sigma:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr
val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr
val init_nd : int array -> (int array -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr
val shape : Symbol.Shape.Type.arr -> int array
val numel : Symbol.Shape.Type.arr -> int
val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit
val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val set_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit
val set_fancy : Owl_types.index list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit
val copy_ : out:'a -> 'b -> 'c
val reset : Symbol.Shape.Type.arr -> unit
val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val pad : ?v:Symbol.Shape.Type.elt -> int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr
val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val concatenate : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr
val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr
val split : ?axis:int -> 'a -> 'b -> 'c
val draw : ?axis:int -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr * 'a array
val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit
val min : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val sum : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val sum_reduce : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val elt_less_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val elt_greater_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr
val conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val transpose_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val dilated_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val dilated_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val dilated_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val max_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val avg_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr
val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val transpose_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val dilated_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val max_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val avg_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val avg_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val avg_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val upsampling2d_backward : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val row_num : Symbol.Shape.Type.arr -> int
val col_num : Symbol.Shape.Type.arr -> int
val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr
val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit
val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit
val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr
val to_rows : Symbol.Shape.Type.arr -> 'a array
val to_cols : Symbol.Shape.Type.arr -> 'a array
val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr
val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr
val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array
val op_to_str : Shape.Type.op -> string
val is_random_variable : Shape.Type.op -> bool
val refnum : 'a Owl_graph.node -> int
val node_shape : Shape.Type.attr Owl_graph.node -> int array
val node_numel : Shape.Type.attr Owl_graph.node -> int
val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool
val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit
val shape_to_str : int array option array -> string
val node_to_str : Shape.Type.attr Owl_graph.node -> string
val node_to_arr : Shape.Type.t -> Shape.Type.arr
val arr_to_node : Shape.Type.arr -> Shape.Type.t
val node_to_elt : Shape.Type.t -> Shape.Type.elt
val elt_to_node : Shape.Type.elt -> Shape.Type.t
val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node
val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node
val var_arr : ?shape:int array -> string -> Shape.Type.arr
val var_elt : string -> Shape.Type.elt
val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr
val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt
val new_block_id : unit -> int
val make_empty_block : ?block_id:int -> int -> Shape.Type.block
val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit
val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit
val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option
val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit
val get_block_id : Shape.Type.attr Owl_graph.node -> int
val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit
val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit
val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit
val get_reuse : Shape.Type.attr Owl_graph.node -> bool
val is_shared : Shape.Type.attr Owl_graph.node -> bool
val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array
val is_var : Shape.Type.attr Owl_graph.node -> bool
val is_const : Shape.Type.attr Owl_graph.node -> bool
val is_node_arr : Shape.Type.attr Owl_graph.node -> bool
val is_node_elt : Shape.Type.attr Owl_graph.node -> bool
val is_assigned : Shape.Type.attr Owl_graph.node -> bool
val check_assigned : Shape.Type.attr Owl_graph.node -> unit
val is_valid : Shape.Type.attr Owl_graph.node -> bool
val validate : Shape.Type.attr Owl_graph.node -> unit
val invalidate : Shape.Type.attr Owl_graph.node -> unit
val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit
val is_freeze : Shape.Type.attr Owl_graph.node -> bool
val freeze : Shape.Type.attr Owl_graph.node -> unit
val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit
val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit
val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit
val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit
val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit
val float_to_elt : float -> Shape.Type.elt
val elt_to_float : Shape.Type.elt -> float
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array
and block = E.Graph.Optimiser.Operator.Symbol.Shape.Type.block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}
and attr = E.Graph.Optimiser.Operator.Symbol.Shape.Type.attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}
and op = E.Graph.Optimiser.Operator.Symbol.Shape.Type.op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
val make_device : unit -> device
val arr_to_value : A.arr -> value
val value_to_arr : value -> A.arr
val elt_to_value : A.elt -> value
val value_to_elt : value -> A.elt
val value_to_float : value -> float
val is_arr : value -> bool
val is_elt : value -> bool
\ No newline at end of file
diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Activation/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Activation/index.html
deleted file mode 100644
index ceeed5247..000000000
--- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Activation/index.html
+++ /dev/null
@@ -1,4 +0,0 @@

Activation (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Activation)

Module Neuron.Activation

type typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Activation.typ =
  1. | Elu
  2. | Relu
  3. | Sigmoid
  4. | HardSigmoid
  5. | Softmax of int
  6. | Softplus
  7. | Softsign
  8. | Tanh
  9. | Relu6
  10. | LeakyRelu of float
  11. | TRelu of float
  12. | Custom of (Optimise.Algodiff.t -> Optimise.Algodiff.t)
  13. | None
type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Activation.neuron_typ = {
  1. mutable activation : typ;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val run_activation : Optimise.Algodiff.t -> typ -> Optimise.Algodiff.t
val copy : neuron_typ -> neuron_typ
val activation_to_string : typ -> string
val to_string : neuron_typ -> string
val to_name : unit -> string
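An activation neuron is created from an activation kind, connected to an input shape, and can then be rendered or run. The sketch below is not from the original page; the functor argument Act and the describe helper are illustrative assumptions over the signature listed above.

module Activation_sketch (Act : sig
  type typ
  type neuron_typ
  val create : typ -> neuron_typ
  val connect : int array -> neuron_typ -> unit
  val to_string : neuron_typ -> string
end) = struct
  (* Build an activation neuron of the given kind, wire its input shape, and
     render it as a string for debugging. *)
  let describe (kind : Act.typ) =
    let n = Act.create kind in
    Act.connect [|32|] n;
    Act.to_string n
end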
\ No newline at end of file
diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Add/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Add/index.html
deleted file mode 100644
index b290ba30d..000000000
--- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Add/index.html
+++ /dev/null
@@ -1,4 +0,0 @@

Add (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Add)

Module Neuron.Add

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Add.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
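The Add neuron merges several upstream outputs of the same shape. The sketch below is not from the original page; the functor argument Add (with run narrowed to neuron_typ for the sketch) and the sum_two helper are illustrative assumptions.

module Add_sketch (Add : sig
  type t              (* stands in for Optimise.Algodiff.t *)
  type neuron_typ
  val create : unit -> neuron_typ
  val connect : int array array -> neuron_typ -> unit
  val run : t array -> neuron_typ -> t
end) = struct
  (* Sum the outputs of two upstream nodes through an Add neuron whose two
     inputs both carry 16-element vectors. *)
  let sum_two (a : Add.t) (b : Add.t) =
    let n = Add.create () in
    Add.connect [| [|16|]; [|16|] |] n;
    Add.run [| a; b |] n
end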
\ No newline at end of file
diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/AlphaDropout/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/AlphaDropout/index.html
deleted file mode 100644
index f84573020..000000000
--- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/AlphaDropout/index.html
+++ /dev/null
@@ -1,4 +0,0 @@

AlphaDropout (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.AlphaDropout)

Module Neuron.AlphaDropout

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.AlphaDropout.neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file
diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Average/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Average/index.html
deleted file mode 100644
index b75e5903f..000000000
--- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Average/index.html
+++ /dev/null
@@ -1,4 +0,0 @@

Average (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Average)

Module Neuron.Average

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Average.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file
diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/AvgPool1D/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/AvgPool1D/index.html
deleted file mode 100644
index a1c4f7701..000000000
--- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/AvgPool1D/index.html
+++ /dev/null
@@ -1,4 +0,0 @@

AvgPool1D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.AvgPool1D)

Module Neuron.AvgPool1D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.AvgPool1D.neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file
diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/AvgPool2D/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/AvgPool2D/index.html
deleted file mode 100644
index 904eaf03b..000000000
--- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/AvgPool2D/index.html
+++ /dev/null
@@ -1,4 +0,0 @@

AvgPool2D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.AvgPool2D)

Module Neuron.AvgPool2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.AvgPool2D.neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file
diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Concatenate/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Concatenate/index.html
deleted file mode 100644
index 32aff4f28..000000000
--- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Concatenate/index.html
+++ /dev/null
@@ -1,4 +0,0 @@

Concatenate (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Concatenate)

Module Neuron.Concatenate

type neuron_typ = - Owl_neural_generic.Make_Embedded(Engine).Neuron.Concatenate.neuron_typ = - {
  1. mutable axis : int;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
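
Unlike the single-input neurons, connect here takes an int array array, one shape per incoming branch. A minimal sketch under the same Neuron instantiation assumption as above (axis and shapes are example values):

  let () =
    let l = Neuron.Concatenate.create 1 in                        (* concatenate along axis 1 *)
    Neuron.Concatenate.connect [| [| 8; 4 |]; [| 8; 6 |] |] l;    (* one shape per parent branch *)
    print_endline (Neuron.Concatenate.to_string l)                (* out_shape is inferred *)
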
Conv1D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Conv1D)

Module Neuron.Conv1D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Conv1D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
Conv2D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Conv2D)

Module Neuron.Conv2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Conv2D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
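
Trainable neurons such as Conv2D expose the full parameter protocol: init allocates w and b, mkpar/mkpri/mkadj expose them to the optimiser, and update writes adjusted values back. A hedged sketch (Neuron stands for an instantiation as above; kernel, stride and shapes are example values):

  let () =
    let l = Neuron.Conv2D.create Owl_types.SAME [| 3; 3; 1; 32 |] [| 1; 1 |]
              Neuron.Init.GlorotUniform in
    Neuron.Conv2D.connect [| 28; 28; 1 |] l;   (* e.g. a 28x28 single-channel input *)
    Neuron.Conv2D.init l;                      (* allocate w and b according to init_typ *)
    let ps = Neuron.Conv2D.mkpar l in          (* [| w; b |], the trainable parameters *)
    Neuron.Conv2D.update l ps                  (* write (possibly optimiser-adjusted) values back *)
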
Conv3D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Conv3D)

Module Neuron.Conv3D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Conv3D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
DilatedConv1D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.DilatedConv1D)

Module Neuron.DilatedConv1D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.DilatedConv1D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
DilatedConv2D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.DilatedConv2D)

Module Neuron.DilatedConv2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.DilatedConv2D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
DilatedConv3D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.DilatedConv3D)

Module Neuron.DilatedConv3D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.DilatedConv3D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
Dot (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Dot)

Module Neuron.Dot

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Dot.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
Dropout (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Dropout)

Module Neuron.Dropout

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Dropout.neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
Embedding (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Embedding)

Module Neuron.Embedding

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Embedding.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_dim : int;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
Flatten (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Flatten)

Module Neuron.Flatten

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Flatten.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
FullyConnected (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.FullyConnected)

Module Neuron.FullyConnected

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.FullyConnected.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
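
A minimal sketch of building a dense layer in isolation (same Neuron instantiation assumption as above; 128 inputs and 10 outputs are example values):

  let () =
    let l = Neuron.FullyConnected.create 10 (Neuron.Init.Gaussian (0., 0.01)) in
    Neuron.FullyConnected.connect [| 128 |] l;   (* 128 inputs -> 10 outputs *)
    Neuron.FullyConnected.init l;                (* allocate w (128x10) and b per init_typ *)
    print_endline (Neuron.FullyConnected.to_string l)
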
GRU (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.GRU)

Module Neuron.GRU

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.GRU.neuron_typ = {
  1. mutable wxz : Optimise.Algodiff.t;
  2. mutable whz : Optimise.Algodiff.t;
  3. mutable wxr : Optimise.Algodiff.t;
  4. mutable whr : Optimise.Algodiff.t;
  5. mutable wxh : Optimise.Algodiff.t;
  6. mutable whh : Optimise.Algodiff.t;
  7. mutable bz : Optimise.Algodiff.t;
  8. mutable br : Optimise.Algodiff.t;
  9. mutable bh : Optimise.Algodiff.t;
  10. mutable h : Optimise.Algodiff.t;
  11. mutable init_typ : Init.typ;
  12. mutable in_shape : int array;
  13. mutable out_shape : int array;
}
val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
GaussianDropout (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.GaussianDropout)

Module Neuron.GaussianDropout

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.GaussianDropout.neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
GaussianNoise (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.GaussianNoise)

Module Neuron.GaussianNoise

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.GaussianNoise.neuron_typ = {
  1. mutable sigma : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
GlobalAvgPool1D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.GlobalAvgPool1D)

Module Neuron.GlobalAvgPool1D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.GlobalAvgPool1D.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
GlobalAvgPool2D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.GlobalAvgPool2D)

Module Neuron.GlobalAvgPool2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.GlobalAvgPool2D.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
GlobalMaxPool1D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.GlobalMaxPool1D)

Module Neuron.GlobalMaxPool1D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.GlobalMaxPool1D.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
GlobalMaxPool2D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.GlobalMaxPool2D)

Module Neuron.GlobalMaxPool2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.GlobalMaxPool2D.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
Init (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Init)

Module Neuron.Init

type typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Init.typ =
  1. | Uniform of float * float
  2. | Gaussian of float * float
  3. | Standard
  4. | Tanh
  5. | GlorotNormal
  6. | GlorotUniform
  7. | LecunNormal
  8. | HeNormal
  9. | Custom of (int array -> Optimise.Algodiff.t)
val calc_fans : int array -> float * float
val run : typ -> int array -> Optimise.Algodiff.t -> Optimise.Algodiff.t
val to_string : typ -> string
val to_name : unit -> string
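
A small sketch of how these values are used (Neuron as above; the kernel shape is an example): each scheme is an ordinary constructor, and calc_fans reports the fan-in/fan-out a Glorot or He scheme would use.

  let () =
    let init = Neuron.Init.Gaussian (0., 0.01) in               (* mean 0, stddev 0.01 *)
    print_endline (Neuron.Init.to_string init);
    let fin, fout = Neuron.Init.calc_fans [| 3; 3; 1; 32 |] in  (* (fan_in, fan_out) of a kernel *)
    Printf.printf "fan_in = %g, fan_out = %g\n" fin fout
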
Input (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Input)

Module Neuron.Input

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Input.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : int array -> neuron_typ
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
LSTM (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.LSTM)

Module Neuron.LSTM

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.LSTM.neuron_typ = {
  1. mutable wxi : Optimise.Algodiff.t;
  2. mutable whi : Optimise.Algodiff.t;
  3. mutable wxc : Optimise.Algodiff.t;
  4. mutable whc : Optimise.Algodiff.t;
  5. mutable wxf : Optimise.Algodiff.t;
  6. mutable whf : Optimise.Algodiff.t;
  7. mutable wxo : Optimise.Algodiff.t;
  8. mutable who : Optimise.Algodiff.t;
  9. mutable bi : Optimise.Algodiff.t;
  10. mutable bc : Optimise.Algodiff.t;
  11. mutable bf : Optimise.Algodiff.t;
  12. mutable bo : Optimise.Algodiff.t;
  13. mutable c : Optimise.Algodiff.t;
  14. mutable h : Optimise.Algodiff.t;
  15. mutable init_typ : Init.typ;
  16. mutable in_shape : int array;
  17. mutable out_shape : int array;
}
val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
Lambda (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Lambda)

Module Neuron.Lambda

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Lambda.neuron_typ = {
  1. mutable lambda : Optimise.Algodiff.t -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : ?out_shape:int array -> (Optimise.Algodiff.t -> Optimise.Algodiff.t) -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
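
Lambda wraps an arbitrary Algodiff function as a neuron. A hedged sketch (Neuron as above; the function is a swish-style activation chosen only for illustration):

  let () =
    let open Neuron.Optimise.Algodiff in
    let l = Neuron.Lambda.create (fun x -> Maths.(x * sigmoid x)) in
    Neuron.Lambda.connect [| 32 |] l;
    print_endline (Neuron.Lambda.to_string l)
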
LambdaArray (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.LambdaArray)

Module Neuron.LambdaArray

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.LambdaArray.neuron_typ = {
  1. mutable lambda : Optimise.Algodiff.t array -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int array -> (Optimise.Algodiff.t array -> Optimise.Algodiff.t) -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
Linear (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Linear)

Module Neuron.Linear

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Linear.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
LinearNoBias (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.LinearNoBias)

Module Neuron.LinearNoBias

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.LinearNoBias.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_shape : int array;
  4. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
Masking (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Masking)

Module Neuron.Masking

Max (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Max)

Module Neuron.Max

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Max.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
MaxPool1D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.MaxPool1D)

Module Neuron.MaxPool1D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.MaxPool1D.neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
MaxPool2D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.MaxPool2D)

Module Neuron.MaxPool2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.MaxPool2D.neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
Mul (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Mul)

Module Neuron.Mul

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Mul.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
Normalisation (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Normalisation)

Module Neuron.Normalisation

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Normalisation.neuron_typ = {
  1. mutable axis : int;
  2. mutable beta : Optimise.Algodiff.t;
  3. mutable gamma : Optimise.Algodiff.t;
  4. mutable mu : Optimise.Algodiff.t;
  5. mutable var : Optimise.Algodiff.t;
  6. mutable decay : Optimise.Algodiff.t;
  7. mutable training : bool;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : ?training:bool -> ?decay:float -> ?mu:Optimise.Algodiff.A.arr -> ?var:Optimise.Algodiff.A.arr -> int -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val load_weights : neuron_typ -> Optimise.Algodiff.t array -> unit
val save_weights : neuron_typ -> Optimise.Algodiff.t array
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
Linalg (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
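
A quick sketch of linsolve at this raw ndarray level, assuming A (the enclosing module, listed further below) is in scope; the 4x4 system is an arbitrary example and a random matrix is almost surely invertible:

  let () =
    let a = A.uniform [| 4; 4 |] in
    let b = A.uniform [| 4; 1 |] in
    let x = A.Linalg.linsolve a b in       (* solve a x = b *)
    let r = A.sub (A.dot a x) b in         (* residual, expected to be ~ 0 *)
    Printf.printf "residual l2 = %g\n" (A.elt_to_float (A.l2norm' r))
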
Mat (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
Scalar (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
A (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Algodiff.A)

Module Algodiff.A

val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
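
Module A is the plain ndarray layer that Algodiff wraps. A hedged sketch of one of the pooling primitives listed above (shapes are illustrative, assuming the usual NHWC layout):

  let () =
    let x = A.uniform [| 1; 28; 28; 3 |] in
    let y = A.max_pool2d ~padding:Owl_types_common.VALID x [| 2; 2 |] [| 2; 2 |] in
    Array.iter (Printf.printf "%d ") (A.shape y)   (* expected: 1 14 14 3 *)
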
Arr (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
Builder (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Algodiff.Builder)

Module Algodiff.Builder

module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t
module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t
module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t
module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array
module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t
module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t
Aiso (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
Piso (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
Siao (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
Sipo (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
Siso (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
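
This is the contract a new single-input single-output operation must satisfy before being lifted with Builder.build_siso. A hedged sketch for f = sin, mirroring how Owl wires up its own unary operators (illustrative, not the library's exact code; assumes the surrounding Algodiff module is opened so F, Arr, A, Maths and Builder are in scope):

  module Sin_siso = struct
    let label = "sin"
    let ff_f a = F (A.Scalar.sin a)            (* forward pass on a scalar *)
    let ff_arr a = Arr (A.sin a)               (* forward pass on an ndarray *)
    let df _cp ap at = Maths.(at * cos ap)     (* tangent: d(sin x) = cos x * dx *)
    let dr a _cp ca = Maths.(!ca * cos a)      (* adjoint: propagate the output adjoint back *)
  end

  let sin' = Builder.build_siso (module Sin_siso : Builder.Siso)
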
Sito (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
Linalg (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t
val logdet : t -> t
val chol : ?upper:bool -> t -> t
val qr : t -> t * t
val lq : t -> t * t
val svd : ?thin:bool -> t -> t * t * t
val sylvester : t -> t -> t -> t
val lyapunov : t -> t -> t
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> t -> t -> t
val (/@) : t -> t -> t
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> t -> t -> t
val care : ?diag_r:bool -> t -> t -> t -> t -> t
val dare : ?diag_r:bool -> t -> t -> t -> t -> t
Mat (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
Maths (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t
val (-) : t -> t -> t
val (*) : t -> t -> t
val (/) : t -> t -> t
val (*@) : t -> t -> t
val (**) : t -> t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val kron : t -> t -> t
val dot : t -> t -> t
val pow : t -> t -> t
val atan2 : t -> t -> t
val min2 : t -> t -> t
val max2 : t -> t -> t
val cross_entropy : t -> t -> t
val inv : t -> t
val neg : t -> t
val abs : t -> t
val signum : t -> t
val floor : t -> t
val ceil : t -> t
val round : t -> t
val sqr : t -> t
val sqrt : t -> t
val log : t -> t
val log2 : t -> t
val log10 : t -> t
val exp : t -> t
val sin : t -> t
val cos : t -> t
val tan : t -> t
val sinh : t -> t
val cosh : t -> t
val tanh : t -> t
val asin : t -> t
val acos : t -> t
val atan : t -> t
val asinh : t -> t
val acosh : t -> t
val atanh : t -> t
val sum' : t -> t
val log_sum_exp' : t -> t
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t
val sum : ?axis:int -> ?keep_dims:bool -> t -> t
val sum_reduce : ?axis:int array -> t -> t
val mean : t -> t
val transpose : ?axis:int array -> t -> t
val swap : int -> int -> t -> t
val l1norm' : t -> t
val l2norm' : t -> t
val l2norm_sqr' : t -> t
val sigmoid : t -> t
val relu : t -> t
val dawsn : t -> t
val softplus : t -> t
val softsign : t -> t
val softmax : ?axis:int -> t -> t
val reshape : t -> int array -> t
val flatten : t -> t
val get_item : t -> int -> int -> t
val get_row : t -> int -> t
val concat : axis:int -> t -> t -> t
val split : axis:int -> int array -> t -> t array
val of_arrays : t array array -> t
val to_arrays : t -> t array array
val concatenate : axis:int -> t array -> t
val stack : axis:int -> t array -> t
val get_slice : int list list -> t -> t
val set_slice : int list list -> t -> t -> t
val get_fancy : Owl_types.index list -> t -> t
val set_fancy : Owl_types.index list -> t -> t -> t
val diag : ?k:int -> t -> t
val diagm : ?k:int -> t -> t
val trace : t -> t
val triu : ?k:int -> t -> t
val tril : ?k:int -> t -> t
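These operators act on Algodiff values, so any expression built from them can be differentiated later. A minimal sketch (assuming the Owl.Algodiff.D instantiation) using only the functions listed above together with pack_flt/unpack_flt from the enclosing Algodiff module:

open Owl
module AD = Algodiff.D   (* assumed concrete instantiation *)

(* f(x) = sin x * exp (-x^2), written entirely with AD-aware operators *)
let f x = AD.Maths.(sin x * exp (neg (sqr x)))

let y = f (AD.pack_flt 0.5)
let () = Printf.printf "f(0.5) = %g\n" (AD.unpack_flt y)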
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/NN/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/NN/index.html deleted file mode 100644 index dafc2270d..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t
val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val upsampling2d : t -> int array -> t
val pad : ?v:A.elt -> int list list -> t -> t
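A sketch of calling one of these primitives directly (assuming the Owl.Algodiff.D instantiation and Owl's NHWC layout: input [|batch; h; w; in_channels|], kernel [|kh; kw; in_channels; out_channels|], stride [|sh; sw|]; the shapes below are illustrative):

open Owl
module AD = Algodiff.D   (* assumed concrete instantiation *)

(* One 28 x 28 single-channel image and a 3 x 3 kernel mapping 1 -> 8 channels. *)
let x = AD.pack_arr (Dense.Ndarray.D.uniform [| 1; 28; 28; 1 |])
let k = AD.pack_arr (Dense.Ndarray.D.uniform [| 3; 3; 1; 8 |])

(* SAME padding keeps the 28 x 28 spatial size; the output gains 8 channels. *)
let y = AD.NN.conv2d ~padding:Owl_types.SAME x k [| 1; 1 |]
let () = AD.shape y |> Array.iter (Printf.printf "%d ")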
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/index.html deleted file mode 100644 index 544f6ae82..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Algodiff/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Algodiff (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Algodiff)

Module Optimise.Algodiff

module A : sig ... end
type t = Owl_neural_generic.Make_Embedded(Engine).Neuron.Optimise.Algodiff.t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
val tag : unit -> int
val primal : t -> t
val primal' : t -> t
val zero : t -> t
val reset_zero : t -> t
val tangent : t -> t
val adjref : t -> t Stdlib.ref
val adjval : t -> t
val shape : t -> int array
val is_float : t -> bool
val is_arr : t -> bool
val row_num : t -> int
val col_num : t -> int
val numel : t -> int
val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t
val clip_by_l2norm : A.elt -> t -> t
val copy_primal' : t -> t
val tile : t -> int array -> t
val repeat : t -> int array -> t
val pack_elt : A.elt -> t
val unpack_elt : t -> A.elt
val pack_flt : float -> t
val _f : float -> t
val unpack_flt : t -> float
val pack_arr : A.arr -> t
val unpack_arr : t -> A.arr
val deep_info : t -> string
val type_info : t -> string
val error_binop : string -> t -> t -> 'a
val error_uniop : string -> t -> 'a
val make_forward : t -> t -> int -> t
val make_reverse : t -> int -> t
val reverse_prop : t -> t -> unit
val diff : (t -> t) -> t -> t
val diff' : (t -> t) -> t -> t * t
val grad : (t -> t) -> t -> t
val grad' : (t -> t) -> t -> t * t
val jacobian : (t -> t) -> t -> t
val jacobian' : (t -> t) -> t -> t * t
val jacobianv : (t -> t) -> t -> t -> t
val jacobianv' : (t -> t) -> t -> t -> t * t
val jacobianTv : (t -> t) -> t -> t -> t
val jacobianTv' : (t -> t) -> t -> t -> t * t
val hessian : (t -> t) -> t -> t
val hessian' : (t -> t) -> t -> t * t
val hessianv : (t -> t) -> t -> t -> t
val hessianv' : (t -> t) -> t -> t -> t * t
val laplacian : (t -> t) -> t -> t
val laplacian' : (t -> t) -> t -> t * t
val gradhessian : (t -> t) -> t -> t * t
val gradhessian' : (t -> t) -> t -> t * t * t
val gradhessianv : (t -> t) -> t -> t -> t * t
val gradhessianv' : (t -> t) -> t -> t -> t * t * t
module Builder : sig ... end
module Maths : sig ... end
module Linalg : sig ... end
module NN : sig ... end
module Mat : sig ... end
module Arr : sig ... end
val to_trace : t list -> string
val to_dot : t list -> string
val pp_num : Stdlib.Format.formatter -> t -> unit
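The differentiation operators above are higher-order functions over ordinary OCaml closures. A minimal sketch (assuming the Owl.Algodiff.D instantiation of this signature):

open Owl
module AD = Algodiff.D   (* assumed concrete instantiation *)

(* Scalar derivative: d/dx x^3 at x = 2 is 12. *)
let f x = AD.Maths.(x * x * x)
let () = Printf.printf "f'(2) = %g\n" (AD.unpack_flt (AD.diff f (AD.pack_flt 2.)))

(* Gradient of a scalar-valued function of a matrix argument. *)
let g x = AD.Maths.l2norm_sqr' x
let () = AD.Mat.print (AD.grad g (AD.Mat.ones 1 3))   (* every entry is 2 *)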
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Batch/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Batch/index.html deleted file mode 100644 index 02b165b77..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Batch/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Batch (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Batch)

Module Optimise.Batch

type typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Optimise.Batch.typ =
  1. | Full
  2. | Mini of int
  3. | Sample of int
  4. | Stochastic
val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val batches : typ -> Algodiff.t -> int
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Checkpoint/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Checkpoint/index.html deleted file mode 100644 index cac526494..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Checkpoint/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Checkpoint (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Checkpoint)

Module Optimise.Checkpoint

type state = Owl_neural_generic.Make_Embedded(Engine).Neuron.Optimise.Checkpoint.state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}
type typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Optimise.Checkpoint.typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of state -> unit
  4. | None
val init_state : int -> float -> state
val default_checkpoint_fun : (string -> 'a) -> 'a
val print_state_info : state -> unit
val print_summary : state -> unit
val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Clipping/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Clipping/index.html deleted file mode 100644 index d233dbdba..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Clipping/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Clipping (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Clipping)

Module Optimise.Clipping

type typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Optimise.Clipping.typ =
  1. | L2norm of float
  2. | Value of float * float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Gradient/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Gradient/index.html deleted file mode 100644 index 6d6311668..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Gradient/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Gradient (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Gradient)

Module Optimise.Gradient

type typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Optimise.Gradient.typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton
val run : typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Learning_Rate/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Learning_Rate/index.html deleted file mode 100644 index 0ba40bff5..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Learning_Rate/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Learning_Rate (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Learning_Rate)

Module Optimise.Learning_Rate

type typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Optimise.Learning_Rate.typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t
val default : typ -> typ
val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Loss/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Loss/index.html deleted file mode 100644 index 8656a935b..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Loss/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Loss (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Loss)

Module Optimise.Loss

type typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Optimise.Loss.typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of Algodiff.t -> Algodiff.t -> Algodiff.t
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
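A small sketch of evaluating a loss directly (illustrative; the concrete module path below is an assumption that mirrors this functor's structure through Owl.Neural.S, and only functions listed on this and the surrounding pages are used):

open Owl
(* Assumed concrete path with the same structure as this functor's output. *)
module O = Neural.S.Graph.Neuron.Optimise

let target     = O.Algodiff.Mat.ones 1 3
let prediction = O.Algodiff.Mat.zeros 1 3
let loss = O.Loss.run O.Loss.Quadratic target prediction
let () = Printf.printf "quadratic loss = %g\n" (O.Algodiff.unpack_flt loss)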
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Momentum/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Momentum/index.html deleted file mode 100644 index 037ee5e00..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Momentum/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Momentum (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Momentum)

Module Optimise.Momentum

type typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Optimise.Momentum.typ =
  1. | Standard of float
  2. | Nesterov of float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Params/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Params/index.html deleted file mode 100644 index 9f84e5238..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Params/index.html +++ /dev/null @@ -1,15 +0,0 @@ - -Params (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Params)

Module Optimise.Params

type typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Optimise.Params.typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}
val default : unit -> typ
val config : ?batch:Batch.typ -> ?gradient:Gradient.typ -> ?loss:Loss.typ -> ?learning_rate:Learning_Rate.typ -> ?regularisation:Regularisation.typ -> ?momentum:Momentum.typ -> ?clipping:Clipping.typ -> ?stopping:Stopping.typ -> ?checkpoint:Checkpoint.typ -> ?verbosity:bool -> float -> typ
val to_string : typ -> string
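A sketch of building a parameter record with config: the trailing float is the epoch count and every omitted field falls back to its default. The concrete module path below is an assumption mirroring this functor's structure through Owl.Neural.S:

open Owl
(* Assumed concrete path; any Optimise module produced by this functor is used the same way. *)
module O = Neural.S.Graph.Neuron.Optimise

let params =
  O.Params.config
    ~batch:(O.Batch.Mini 128)
    ~learning_rate:(O.Learning_Rate.Adagrad 0.005)
    ~loss:O.Loss.Cross_entropy
    ~stopping:(O.Stopping.Const 1e-6)
    10.                                   (* the trailing float is the epoch count *)

let () = print_endline (O.Params.to_string params)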
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Regularisation/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Regularisation/index.html deleted file mode 100644 index 32b0f5c8c..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Regularisation/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Regularisation (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Regularisation)

Module Optimise.Regularisation

type typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Optimise.Regularisation.typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None
val run : typ -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Stopping/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Stopping/index.html deleted file mode 100644 index 6fe58fd54..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Stopping/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Stopping (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Stopping)

Module Optimise.Stopping

type typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Optimise.Stopping.typ =
  1. | Const of float
  2. | Early of int * int
  3. | None
val run : typ -> float -> bool
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Utils/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Utils/index.html deleted file mode 100644 index b6840fc9d..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise.Utils)

Module Optimise.Utils

val sample_num : Algodiff.t -> int
val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val get_chunk : Algodiff.t -> Algodiff.t -> int -> int -> Algodiff.t * Algodiff.t
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/index.html deleted file mode 100644 index ea2d0e986..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Optimise/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -Optimise (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Optimise)

Module Neuron.Optimise

module Algodiff : sig ... end
module Utils : sig ... end
module Learning_Rate : sig ... end
module Batch : sig ... end
module Loss : sig ... end
module Gradient : sig ... end
module Momentum : sig ... end
module Regularisation : sig ... end
module Clipping : sig ... end
module Stopping : sig ... end
module Checkpoint : sig ... end
module Params : sig ... end
val minimise_weight : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> (Algodiff.t array array -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
val minimise_fun : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_compiled_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> (unit -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
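minimise_fun drives the optimiser over an arbitrary scalar-valued Algodiff function. The following sketch (assuming the same concrete Owl.Neural.S path as above; illustrative only) minimises (x - 3)^2 with plain gradient descent:

open Owl
(* Assumed concrete path; illustrative only. *)
module O = Neural.S.Graph.Neuron.Optimise
module AD = O.Algodiff

(* Minimise f(x) = (x - 3)^2 from a starting point of 0. *)
let f x = AD.Maths.(sqr (x - AD.pack_flt 3.))

let params =
  O.Params.config
    ~gradient:O.Gradient.GD
    ~learning_rate:(O.Learning_Rate.Const 0.1)
    ~verbosity:false
    50.

let _state, x_min = O.minimise_fun params f (AD.pack_flt 0.)
let () = Printf.printf "argmin ~ %g\n" (AD.unpack_flt x_min)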
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Padding1D/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Padding1D/index.html deleted file mode 100644 index 700e80164..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Padding1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding1D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Padding1D)

Module Neuron.Padding1D

\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Padding2D/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Padding2D/index.html deleted file mode 100644 index 71979e1f6..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Padding2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Padding2D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Padding2D)

Module Neuron.Padding2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Padding2D.neuron_typ = {
  1. mutable padding : int array array;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int array array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Padding3D/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Padding3D/index.html deleted file mode 100644 index 739fd1272..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Padding3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding3D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Padding3D)

Module Neuron.Padding3D

\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Recurrent/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Recurrent/index.html deleted file mode 100644 index 2fbf07660..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Recurrent/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Recurrent (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Recurrent)

Module Neuron.Recurrent

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Recurrent.neuron_typ = {
  1. mutable whh : Optimise.Algodiff.t;
  2. mutable wxh : Optimise.Algodiff.t;
  3. mutable why : Optimise.Algodiff.t;
  4. mutable bh : Optimise.Algodiff.t;
  5. mutable by : Optimise.Algodiff.t;
  6. mutable h : Optimise.Algodiff.t;
  7. mutable hiddens : int;
  8. mutable act : Activation.typ;
  9. mutable init_typ : Init.typ;
  10. mutable in_shape : int array;
  11. mutable out_shape : int array;
}
val create : ?time_steps:int -> ?inputs:int -> int -> int -> Activation.typ -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Reshape/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Reshape/index.html deleted file mode 100644 index 01d98d0e6..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Reshape/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Reshape (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Reshape)

Module Neuron.Reshape

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Reshape.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : ?inputs:int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Slice/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Slice/index.html deleted file mode 100644 index 0e70ef820..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/Slice/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Slice (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.Slice)

Module Neuron.Slice

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.Slice.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
  3. mutable slice : int list list;
}
val create : int list list -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/TransposeConv1D/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/TransposeConv1D/index.html deleted file mode 100644 index 5ddbe73aa..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/TransposeConv1D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -TransposeConv1D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.TransposeConv1D)

Module Neuron.TransposeConv1D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.TransposeConv1D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/TransposeConv2D/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/TransposeConv2D/index.html deleted file mode 100644 index 0f7f59e3b..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/TransposeConv2D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -TransposeConv2D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.TransposeConv2D)

Module Neuron.TransposeConv2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.TransposeConv2D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/TransposeConv3D/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/TransposeConv3D/index.html deleted file mode 100644 index 7fc843cca..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/TransposeConv3D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -TransposeConv3D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.TransposeConv3D)

Module Neuron.TransposeConv3D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.TransposeConv3D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/UpSampling1D/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/UpSampling1D/index.html deleted file mode 100644 index fd6db46e5..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/UpSampling1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling1D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.UpSampling1D)

Module Neuron.UpSampling1D

\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/UpSampling2D/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/UpSampling2D/index.html deleted file mode 100644 index ed88baf3d..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/UpSampling2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -UpSampling2D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.UpSampling2D)

Module Neuron.UpSampling2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Engine).Neuron.UpSampling2D.neuron_typ = {
  1. mutable size : int array;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/UpSampling3D/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/UpSampling3D/index.html deleted file mode 100644 index 6e76e2a2b..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/UpSampling3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling3D (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron.UpSampling3D)

Module Neuron.UpSampling3D

\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/index.html deleted file mode 100644 index d7745c285..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/Neuron/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Neuron (owl-base.Owl_neural_compiler.Make.Neural.Graph.Neuron)

Module Graph.Neuron

module Optimise : sig ... end
module Init : sig ... end
module Input : sig ... end
module Activation : sig ... end
module Linear : sig ... end
module LinearNoBias : sig ... end
module Recurrent : sig ... end
module LSTM : sig ... end
module GRU : sig ... end
module Conv1D : sig ... end
module Conv2D : sig ... end
module Conv3D : sig ... end
module DilatedConv1D : sig ... end
module DilatedConv2D : sig ... end
module DilatedConv3D : sig ... end
module TransposeConv1D : sig ... end
module TransposeConv2D : sig ... end
module TransposeConv3D : sig ... end
module FullyConnected : sig ... end
module MaxPool1D : sig ... end
module MaxPool2D : sig ... end
module AvgPool1D : sig ... end
module AvgPool2D : sig ... end
module GlobalMaxPool1D : sig ... end
module GlobalMaxPool2D : sig ... end
module GlobalAvgPool1D : sig ... end
module GlobalAvgPool2D : sig ... end
module UpSampling1D : sig ... end
module UpSampling2D : sig ... end
module UpSampling3D : sig ... end
module Padding1D : sig ... end
module Padding2D : sig ... end
module Padding3D : sig ... end
module Lambda : sig ... end
module LambdaArray : sig ... end
module Dropout : sig ... end
module Reshape : sig ... end
module Flatten : sig ... end
module Slice : sig ... end
module Add : sig ... end
module Mul : sig ... end
module Dot : sig ... end
module Max : sig ... end
module Average : sig ... end
module Concatenate : sig ... end
module Normalisation : sig ... end
module GaussianNoise : sig ... end
module GaussianDropout : sig ... end
module AlphaDropout : sig ... end
module Embedding : sig ... end
module Masking : sig ... end
type neuron = Owl_neural_generic.Make_Embedded(Engine).Neuron.neuron =
  1. | Input of Input.neuron_typ
  2. | Linear of Linear.neuron_typ
  3. | LinearNoBias of LinearNoBias.neuron_typ
  4. | Embedding of Embedding.neuron_typ
  5. | LSTM of LSTM.neuron_typ
  6. | GRU of GRU.neuron_typ
  7. | Recurrent of Recurrent.neuron_typ
  8. | Conv1D of Conv1D.neuron_typ
  9. | Conv2D of Conv2D.neuron_typ
  10. | Conv3D of Conv3D.neuron_typ
  11. | DilatedConv1D of DilatedConv1D.neuron_typ
  12. | DilatedConv2D of DilatedConv2D.neuron_typ
  13. | DilatedConv3D of DilatedConv3D.neuron_typ
  14. | TransposeConv1D of TransposeConv1D.neuron_typ
  15. | TransposeConv2D of TransposeConv2D.neuron_typ
  16. | TransposeConv3D of TransposeConv3D.neuron_typ
  17. | FullyConnected of FullyConnected.neuron_typ
  18. | MaxPool1D of MaxPool1D.neuron_typ
  19. | MaxPool2D of MaxPool2D.neuron_typ
  20. | AvgPool1D of AvgPool1D.neuron_typ
  21. | AvgPool2D of AvgPool2D.neuron_typ
  22. | GlobalMaxPool1D of GlobalMaxPool1D.neuron_typ
  23. | GlobalMaxPool2D of GlobalMaxPool2D.neuron_typ
  24. | GlobalAvgPool1D of GlobalAvgPool1D.neuron_typ
  25. | GlobalAvgPool2D of GlobalAvgPool2D.neuron_typ
  26. | UpSampling2D of UpSampling2D.neuron_typ
  27. | Padding2D of Padding2D.neuron_typ
  28. | Dropout of Dropout.neuron_typ
  29. | Reshape of Reshape.neuron_typ
  30. | Flatten of Flatten.neuron_typ
  31. | Slice of Slice.neuron_typ
  32. | Lambda of Lambda.neuron_typ
  33. | LambdaArray of LambdaArray.neuron_typ
  34. | Activation of Activation.neuron_typ
  35. | GaussianNoise of GaussianNoise.neuron_typ
  36. | GaussianDropout of GaussianDropout.neuron_typ
  37. | AlphaDropout of AlphaDropout.neuron_typ
  38. | Normalisation of Normalisation.neuron_typ
  39. | Add of Add.neuron_typ
  40. | Mul of Mul.neuron_typ
  41. | Dot of Dot.neuron_typ
  42. | Max of Max.neuron_typ
  43. | Average of Average.neuron_typ
  44. | Concatenate of Concatenate.neuron_typ
val get_in_out_shape : neuron -> int array * int array
val get_in_shape : neuron -> int array
val get_out_shape : neuron -> int array
val connect : int array array -> neuron -> unit
val init : neuron -> unit
val reset : neuron -> unit
val mktag : int -> neuron -> unit
val mkpar : neuron -> Optimise.Algodiff.t array
val mkpri : neuron -> Optimise.Algodiff.t array
val mkadj : neuron -> Optimise.Algodiff.t array
val update : neuron -> Optimise.Algodiff.t array -> unit
val load_weights : neuron -> Optimise.Algodiff.t array -> unit
val save_weights : neuron -> Optimise.Algodiff.t array
val copy : neuron -> neuron
val to_string : neuron -> string
val to_name : neuron -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/Graph/index.html b/owl-base/Owl_neural_compiler/Make/Neural/Graph/index.html deleted file mode 100644 index b46cb2c2a..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/Graph/index.html +++ /dev/null @@ -1,243 +0,0 @@ - -Graph (owl-base.Owl_neural_compiler.Make.Neural.Graph)

Module Neural.Graph

module Neuron : sig ... end
type node = Owl_neural_generic.Make_Embedded(Engine).node = {
  1. mutable name : string;
  2. mutable prev : node array;
  3. mutable next : node array;
  4. mutable neuron : Neuron.neuron;
  5. mutable output : Neuron.Optimise.Algodiff.t option;
  6. mutable network : network;
  7. mutable train : bool;
}
and network = Owl_neural_generic.Make_Embedded(Engine).network = {
  1. mutable nnid : string;
  2. mutable size : int;
  3. mutable roots : node array;
  4. mutable outputs : node array;
  5. mutable topo : node array;
}
val make_network : ?nnid:string -> int -> node array -> node array -> network
val make_node : ?name:string -> ?train:bool -> node array -> node array -> Neuron.neuron -> Neuron.Optimise.Algodiff.t option -> network -> node
val get_roots : network -> node array
val get_outputs : network -> node array
val get_node : network -> string -> node
val get_network : ?name:string -> node -> network
val outputs : ?name:string -> node array -> network
val get_network_name : network -> string
val set_network_name : network -> string -> unit
val collect_output : node array -> Neuron.Optimise.Algodiff.t array
val connect_pair : node -> node -> unit
val connect_to_parents : node array -> node -> unit
val add_node : ?act_typ:Neuron.Activation.typ -> network -> node array -> node -> node
val input_shape : network -> int array
val input_shapes : network -> int array array
val init : network -> unit
val reset : network -> unit
val mktag : int -> network -> unit
val mkpar : network -> Neuron.Optimise.Algodiff.t array array
val mkpri : network -> Neuron.Optimise.Algodiff.t array array
val mkadj : network -> Neuron.Optimise.Algodiff.t array array
val update : network -> Neuron.Optimise.Algodiff.t array array -> unit
val run_inputs : Neuron.Optimise.Algodiff.t array -> network -> Neuron.Optimise.Algodiff.t array
val forward_inputs : network -> Neuron.Optimise.Algodiff.t array -> Neuron.Optimise.Algodiff.t array * Neuron.Optimise.Algodiff.t array array
val backward : network -> Neuron.Optimise.Algodiff.t -> Neuron.Optimise.Algodiff.t array array * Neuron.Optimise.Algodiff.t array array
val copy : network -> network
val model_inputs : network -> Neuron.Optimise.Algodiff.A.arr array -> Neuron.Optimise.Algodiff.A.arr array
val input : ?name:string -> int array -> node
val inputs : ?names:string array -> int array array -> node array
val activation : ?name:string -> Neuron.Activation.typ -> node -> node
val linear : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> node -> node
val linear_nobias : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> node -> node
val embedding : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> int -> node -> node
val recurrent : ?name:string -> ?init_typ:Neuron.Init.typ -> act_typ:Neuron.Activation.typ -> int -> int -> node -> node
val lstm : ?name:string -> ?init_typ:Neuron.Init.typ -> int -> node -> node
val gru : ?name:string -> ?init_typ:Neuron.Init.typ -> int -> node -> node
val conv1d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val conv2d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val conv3d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val dilated_conv1d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> int array -> node -> node
val dilated_conv2d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> int array -> node -> node
val dilated_conv3d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> int array -> node -> node
val transpose_conv1d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val transpose_conv2d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val transpose_conv3d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val fully_connected : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> node -> node
val max_pool1d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val max_pool2d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val avg_pool1d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val avg_pool2d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val global_max_pool1d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val global_max_pool2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val global_avg_pool1d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val global_avg_pool2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val upsampling2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> int array -> node -> node
val padding2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> int array array -> node -> node
val dropout : ?name:string -> float -> node -> node
val gaussian_noise : ?name:string -> float -> node -> node
val gaussian_dropout : ?name:string -> float -> node -> node
val alpha_dropout : ?name:string -> float -> node -> node
val normalisation : ?name:string -> ?axis:int -> ?training:bool -> ?decay:float -> ?mu:Neuron.Optimise.Algodiff.A.arr -> ?var:Neuron.Optimise.Algodiff.A.arr -> node -> node
val reshape : ?name:string -> int array -> node -> node
val flatten : ?name:string -> node -> node
val slice : ?name:string -> int list list -> node -> node
val lambda : ?name:string -> ?act_typ:Neuron.Activation.typ -> ?out_shape:int array -> (Neuron.Optimise.Algodiff.t -> Neuron.Optimise.Algodiff.t) -> node -> node
val lambda_array : ?name:string -> ?act_typ:Neuron.Activation.typ -> int array -> (Neuron.Optimise.Algodiff.t array -> Neuron.Optimise.Algodiff.t) -> node array -> node
val add : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val mul : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val dot : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val max : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val average : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val concatenate : ?name:string -> ?act_typ:Neuron.Activation.typ -> int -> node array -> node
val to_string : network -> string
val pp_network : Stdlib.Format.formatter -> network -> unit
val print : network -> unit
val save : ?unsafe:bool -> network -> string -> unit
val load : string -> network
val save_weights : network -> string -> unit
val load_weights : network -> string -> unit
val make_subnetwork : ?copy:bool -> ?make_inputs:string array -> network -> string array -> network
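These combinators build a network by piping nodes into one another. A compact sketch (assuming the concrete Owl.Neural.S.Graph instantiation of this interface and Owl's Activation.typ constructors such as Relu and Softmax; input size and layer widths are illustrative):

open Owl
open Neural.S
open Neural.S.Graph

(* A two-layer perceptron over 784-dimensional inputs (e.g. flattened MNIST). *)
let network =
  input [| 784 |]
  |> linear 300 ~act_typ:Activation.Relu
  |> linear 10 ~act_typ:(Activation.Softmax 1)
  |> get_network

let () = print network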
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/Neural/index.html b/owl-base/Owl_neural_compiler/Make/Neural/index.html deleted file mode 100644 index c147cc73b..000000000 --- a/owl-base/Owl_neural_compiler/Make/Neural/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Neural (owl-base.Owl_neural_compiler.Make.Neural)

Module Make.Neural

module Graph : sig ... end
module Optimise = Graph.Neuron.Optimise
module Init = Graph.Neuron.Init
module Activation = Graph.Neuron.Activation
module Regularisation = Graph.Neuron.Optimise.Regularisation
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Linalg/index.html b/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Linalg/index.html deleted file mode 100644 index 6ccb9afc5..000000000 --- a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Linalg/index.html +++ /dev/null @@ -1,33 +0,0 @@ - -Linalg (owl-base.Owl_neural_compiler.Make.E.Graph.Optimiser.Operator.Linalg)

Module Operator.Linalg

\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Mat/index.html b/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Mat/index.html deleted file mode 100644 index 516020321..000000000 --- a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_compiler.Make.E.Graph.Optimiser.Operator.Mat)

Module Operator.Mat

val eye : int -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Scalar/index.html b/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Scalar/index.html deleted file mode 100644 index 214d00e3c..000000000 --- a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Scalar/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -Scalar (owl-base.Owl_neural_compiler.Make.E.Graph.Optimiser.Operator.Scalar)

Module Operator.Scalar

\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html b/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html deleted file mode 100644 index 13180af94..000000000 --- a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_neural_compiler.Make.E.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
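A sketch written purely against the module type included by Device.A below (Owl_types_ndarray_algodiff.Sig); it does not assume any particular backend, only the functions listed on these pages. A, the functor argument, stands for whichever ndarray module the engine supplies:

module Linalg_demo (A : Owl_types_ndarray_algodiff.Sig) = struct
  (* QR-factorise a random square matrix and rebuild it from the factors. *)
  let reconstruct n =
    let a = A.uniform [| n; n |] in
    let q, r = A.Linalg.qr a in
    A.dot q r (* equal to a up to rounding *)
end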
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html b/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html deleted file mode 100644 index 19c5d42c1..000000000 --- a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_compiler.Make.E.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html b/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html deleted file mode 100644 index 0a4002d1e..000000000 --- a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_neural_compiler.Make.E.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html b/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html deleted file mode 100644 index 9a6a1d119..000000000 --- a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_neural_compiler.Make.E.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
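The trailing-underscore functions above are the in-place variants: they write their result into the optional out buffer (when out is omitted, the result typically overwrites the first operand) and return unit, so no fresh ndarray is allocated. A minimal sketch, assuming a module N that implements this signature and using only functions listed in it (of_array, float_to_elt, add_, mul_scalar_):

  (* build small 2x2 test arrays through the listed constructors *)
  let arr2x2 fs = N.of_array (Array.map N.float_to_elt fs) [| 2; 2 |]

  let () =
    let x   = arr2x2 [| 1.; 2.; 3.; 4. |] in
    let y   = arr2x2 [| 5.; 6.; 7.; 8. |] in
    let out = arr2x2 [| 0.; 0.; 0.; 0. |] in
    N.add_ ~out x y;                            (* out <- x + y *)
    N.mul_scalar_ ~out out (N.float_to_elt 2.)  (* out <- 2 * out, still in place *)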
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
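dot_ is the in-place counterpart of dot. Its transa/transb/alpha/beta/c parameters suggest the usual GEMM convention (c <- alpha * op(a) * op(b) + beta * c); that reading is an assumption, not stated by this page. A sketch reusing the assumed module N and the arr2x2 helper from the example above:

  let () =
    let a = arr2x2 [| 1.; 2.; 3.; 4. |] in
    let b = arr2x2 [| 1.; 0.; 0.; 1. |] in   (* identity matrix *)
    let c = arr2x2 [| 0.; 0.; 0.; 0. |] in
    N.dot_ ~c a b                             (* with the default coefficients, c <- a * b *)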
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
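These reductions take an optional axis (reduce along a single dimension) and keep_dims flag (keep the reduced dimension as size 1), whereas the primed variants below (prod', mean', var', std') fold the whole array into a single elt. A short sketch with the same assumed module N and arr2x2 helper:

  let () =
    let x        = arr2x2 [| 1.; 2.; 3.; 4. |] in
    let col_sums = N.sum ~axis:0 ~keep_dims:true x in   (* reduce along axis 0, keep the rank *)
    let mu       = N.elt_to_float (N.mean' x) in        (* mean of all elements, as a float *)
    ignore (col_sums, mu)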
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html b/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html deleted file mode 100644 index 564076f82..000000000 --- a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/Device/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Device (owl-base.Owl_neural_compiler.Make.E.Graph.Optimiser.Operator.Symbol.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html b/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html deleted file mode 100644 index 37155e145..000000000 --- a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/Type/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Type (owl-base.Owl_neural_compiler.Make.E.Graph.Optimiser.Operator.Symbol.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

The block type keeps a reference to a block of memory and to the nodes sharing that block.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/index.html b/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/index.html deleted file mode 100644 index c5e9abcef..000000000 --- a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/Shape/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Shape (owl-base.Owl_neural_compiler.Make.E.Graph.Optimiser.Operator.Symbol.Shape)

Module Symbol.Shape

Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO

\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/index.html b/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/index.html deleted file mode 100644 index 5a7202735..000000000 --- a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/Symbol/index.html +++ /dev/null @@ -1,28 +0,0 @@ - -Symbol (owl-base.Owl_neural_compiler.Make.E.Graph.Optimiser.Operator.Symbol)

Module Operator.Symbol

Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO
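var_arr and var_elt declare named variable nodes whose concrete values can be supplied later (see assign_arr and assign_elt below), while const_arr and const_elt wrap values that never change. A hypothetical sketch, assuming this Symbol module is aliased as S:

  let x  = S.var_arr ~shape:[| 28; 28 |] "x" in   (* an ndarray placeholder *)
  let lr = S.var_elt "learning_rate" in           (* a scalar placeholder *)
  ignore (x, lr)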

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.

val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.

val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit

Links a node to a reusable block and initialises the node's memory within the block's memory.
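A hypothetical end-to-end sketch of the block helpers above, assuming the module is aliased as S (the op and sizes are illustrative only):

  let n   = S.make_node ~shape:[| Some [| 8; 8 |] |] (S.Shape.Type.Zeros [| 8; 8 |]) in
  let blk = S.make_empty_block ~block_id:(S.new_block_id ()) 64 in
  S.add_node_to_block n blk;   (* n's memory is now taken from blk *)
  Printf.printf "assigned: %b, block id: %d\n" (S.is_assigned n) (S.get_block_id n)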

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Return the node that is currently using the memory of the block.

val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit

Update the node that is currently using the block of memory.

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.

val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO

\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/index.html b/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/index.html deleted file mode 100644 index f888ce65e..000000000 --- a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/Operator/index.html +++ /dev/null @@ -1,420 +0,0 @@ - -Operator (owl-base.Owl_neural_compiler.Make.E.Graph.Optimiser.Operator)

Module Optimiser.Operator

Vectorised functions
val empty : int array -> Symbol.Shape.Type.arr

TODO

val zeros : int array -> Symbol.Shape.Type.arr

TODO

val ones : int array -> Symbol.Shape.Type.arr

TODO

val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val sequential : ?a:Symbol.Shape.Type.elt -> ?step:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val uniform : ?a:Symbol.Shape.Type.elt -> ?b:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val gaussian : ?mu:Symbol.Shape.Type.elt -> ?sigma:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val init_nd : int array -> (int array -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val shape : Symbol.Shape.Type.arr -> int array

TODO

val numel : Symbol.Shape.Type.arr -> int

TODO

TODO

val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit

TODO

val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val set_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

TODO

val set_fancy : Owl_types.index list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

val copy_ : out:'a -> 'b -> 'c

TODO

val reset : Symbol.Shape.Type.arr -> unit

TODO

val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val pad : ?v:Symbol.Shape.Type.elt -> int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr

TODO

val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val concatenate : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO
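The constructors and combinators in this module build symbolic nodes only; no numerical work happens until the graph is evaluated by an engine. A small sketch using functions listed above (shapes are illustrative):

  let x = zeros [| 2; 3 |] in
  let y = ones  [| 2; 3 |] in
  let z = concatenate ~axis:0 [| x; y |] in   (* a symbolic node of shape [|4; 3|] *)
  ignore z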

TODO

val split : ?axis:int -> 'a -> 'b -> 'c

TODO

val draw : ?axis:int -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr * 'a array

TODO

TODO

delay f x returns f x. It makes it possible to use a function that is not tracked by the computation graph and to delay its evaluation. The output must have the same shape as the input.

delay_array out_shape f x works in the same way as delay but applies to an array of ndarrays. The shape of the output must be supplied as an argument.
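A hedged sketch of delay based only on the description above (its val line is missing from this page, so the exact type is not reproduced here):

  let x = zeros [| 2; 3 |] in
  let y = delay (fun a -> a) x in   (* the identity function trivially preserves the shape *)
  ignore y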

val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

lazy_print x prints the output of x when it is evaluated. It is implemented as an identity node. For information about the optional parameters, refer to the print function of the Ndarray module.
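For example (a sketch; the optional arguments follow the signature above):

  let x  = zeros [| 4; 4 |] in
  let x' = lazy_print ~max_row:10 ~header:true x in
  (* x' carries the same value as x; the printing happens when the graph is evaluated *)
  ignore x'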

val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit

TODO

val min : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum_reduce : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

TODO

val elt_less_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val elt_greater_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val upsampling2d_backward : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val row_num : Symbol.Shape.Type.arr -> int

TODO

val col_num : Symbol.Shape.Type.arr -> int

TODO

val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

TODO

val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val to_rows : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val to_cols : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr

TODO

val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr

TODO

val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array

TODO

Scalar functions
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/index.html b/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/index.html deleted file mode 100644 index 5bdb98931..000000000 --- a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/Optimiser/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Optimiser (owl-base.Owl_neural_compiler.Make.E.Graph.Optimiser)

Module Graph.Optimiser

Core functions
val estimate_complexity : 'a Owl_graph.node array -> int * int

TODO

val optimise_nodes : Operator.Symbol.Shape.Type.attr Owl_graph.node array -> unit

TODO

\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/index.html b/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/index.html deleted file mode 100644 index dc162779f..000000000 --- a/owl-base/Owl_neural_compiler/Make/argument-1-E/Graph/index.html +++ /dev/null @@ -1,33 +0,0 @@ - -Graph (owl-base.Owl_neural_compiler.Make.E.Graph)

Module E.Graph

Type definition
type graph

TODO

Core functions
val shape_or_value : Optimiser.Operator.Symbol.Shape.Type.t -> string

TODO

val graph_to_dot : graph -> string

TODO
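As its name suggests, graph_to_dot renders the graph in Graphviz dot format. A sketch that writes it to a file using only the standard library (the file name is illustrative):

  let dump_dot g =
    let oc = open_out "cgraph.dot" in
    output_string oc (graph_to_dot g);
    close_out oc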

val graph_to_trace : graph -> string

TODO

val save_graph : 'a -> string -> unit

TODO

val load_graph : string -> 'a * 'b

TODO

val invalidate_rvs : graph -> unit

TODO

val is_iopair_safe : 'a Owl_graph.node -> 'a Owl_graph.node -> bool

TODO

val update_iopair : graph -> unit

TODO

val remove_unused_iopair : 'a Owl_graph.node array -> 'b array -> 'a Owl_graph.node array * 'b array

TODO

val optimise : graph -> unit

TODO

\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/argument-1-E/index.html b/owl-base/Owl_neural_compiler/Make/argument-1-E/index.html deleted file mode 100644 index 1e96082ba..000000000 --- a/owl-base/Owl_neural_compiler/Make/argument-1-E/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -E (owl-base.Owl_neural_compiler.Make.E)

Parameter Make.E

Core evaluation functions of the engine

TODO

TODO

val eval_graph : Graph.graph -> unit

TODO

\ No newline at end of file diff --git a/owl-base/Owl_neural_compiler/Make/index.html b/owl-base/Owl_neural_compiler/Make/index.html deleted file mode 100644 index ac643e930..000000000 --- a/owl-base/Owl_neural_compiler/Make/index.html +++ /dev/null @@ -1,49 +0,0 @@ - -Make (owl-base.Owl_neural_compiler.Make)

Module Owl_neural_compiler.Make

Parameters

Signature

module Engine : sig ... end
module Neural : sig ... end

Naive compilation functions; the loss function needs to be passed in.

Shallow compilation functions; these include only the gradient.

Deep compilation functions; these include gs, us, ps, ch, and the new weights.

val make_eval_fun : 'a -> Neural.Algodiff.t -> Neural.Algodiff.t -> Engine.Graph.graph -> Neural.Algodiff.t -> Neural.Algodiff.t -> 'b
val make_update_fun : Engine.graph -> unit -> unit
val model_inputs : ?optimise:bool -> ?batch_size:int -> Neural.Graph.network -> Neural.Algodiff.t array -> Neural.Algodiff.t array
val model : ?optimise:bool -> ?batch_size:int -> Neural.Graph.network -> Neural.Algodiff.t -> Neural.Algodiff.t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/.dummy b/owl-base/Owl_neural_generic/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Activation/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Activation/index.html deleted file mode 100644 index ab9fea0a5..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Activation/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Activation (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Activation)

Module Neuron.Activation

type typ =
  1. | Elu
  2. | Relu
  3. | Sigmoid
  4. | HardSigmoid
  5. | Softmax of int
  6. | Softplus
  7. | Softsign
  8. | Tanh
  9. | Relu6
  10. | LeakyRelu of float
  11. | TRelu of float
  12. | Custom of (Optimise.Algodiff.t -> Optimise.Algodiff.t)
  13. | None
    (*

    Types of activation functions. A short sketch of the Custom constructor follows this type definition.

    *)
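A hedged sketch of the Custom constructor mentioned above: it wraps any function from Algodiff values to Algodiff values, for instance a swish-like activation built from the Algodiff maths operators (the AD alias is an assumption about how Optimise.Algodiff is brought into scope):

  module AD = Optimise.Algodiff

  (* x * sigmoid x, written with Algodiff's overloaded operators *)
  let swish = Custom (fun x -> AD.Maths.(x * sigmoid x))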
type neuron_typ = {
  1. mutable activation : typ;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val run_activation : Optimise.Algodiff.t -> typ -> Optimise.Algodiff.t

Run one specific activation function.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val activation_to_string : typ -> string

Return the name of a specific activation function.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
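Putting these functions together, a minimal sketch of driving this module by hand (the surrounding Graph module normally does this wiring):

  let act = create Relu in
  connect [| 32 |] act;            (* fix the input shape of the neuron *)
  print_endline (to_string act)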

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Add/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Add/index.html deleted file mode 100644 index 70a01cc83..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Add/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Add (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Add)

Module Neuron.Add

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/AlphaDropout/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/AlphaDropout/index.html deleted file mode 100644 index 6e89124ac..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/AlphaDropout/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -AlphaDropout (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.AlphaDropout)

Module Neuron.AlphaDropout

type neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : float -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Average/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Average/index.html deleted file mode 100644 index b6fd4d746..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Average/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Average (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Average)

Module Neuron.Average

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/AvgPool1D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/AvgPool1D/index.html deleted file mode 100644 index 360cd3f81..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/AvgPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -AvgPool1D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.AvgPool1D)

Module Neuron.AvgPool1D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : Owl_types.padding -> int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/AvgPool2D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/AvgPool2D/index.html deleted file mode 100644 index f90afa5b2..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/AvgPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -AvgPool2D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.AvgPool2D)

Module Neuron.AvgPool2D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : Owl_types.padding -> int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Concatenate/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Concatenate/index.html deleted file mode 100644 index bf919fef0..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Concatenate/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Concatenate (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Concatenate)

Module Neuron.Concatenate

type neuron_typ = {
  1. mutable axis : int;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : int -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Conv1D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Conv1D/index.html deleted file mode 100644 index a9ecc4172..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Conv1D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Conv1D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Conv1D)

Module Neuron.Conv1D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Conv2D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Conv2D/index.html deleted file mode 100644 index de04f6d0f..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Conv2D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Conv2D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Conv2D)

Module Neuron.Conv2D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Conv3D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Conv3D/index.html deleted file mode 100644 index 8d0ff7abe..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Conv3D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Conv3D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Conv3D)

Module Neuron.Conv3D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/DilatedConv1D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/DilatedConv1D/index.html deleted file mode 100644 index c8b87621a..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/DilatedConv1D/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -DilatedConv1D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.DilatedConv1D)

Module Neuron.DilatedConv1D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/DilatedConv2D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/DilatedConv2D/index.html deleted file mode 100644 index 9e2c209a8..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/DilatedConv2D/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -DilatedConv2D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.DilatedConv2D)

Module Neuron.DilatedConv2D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/DilatedConv3D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/DilatedConv3D/index.html deleted file mode 100644 index affcc5aff..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/DilatedConv3D/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -DilatedConv3D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.DilatedConv3D)

Module Neuron.DilatedConv3D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Dot/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Dot/index.html deleted file mode 100644 index 7caeed222..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Dot/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Dot (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Dot)

Module Neuron.Dot

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Dropout/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Dropout/index.html deleted file mode 100644 index b96d9a6d8..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Dropout/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Dropout (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Dropout)

Module Neuron.Dropout

type neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : float -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
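A minimal sketch (not from the source): a dropout neuron, assuming the instantiation usually opened as Owl.Neural.S and that the float argument is the probability of dropping an input element.

open Owl.Neural.S

let drop = Neuron.Dropout.create 0.2   (* drop roughly 20% of inputs *)

let () =
  Neuron.Dropout.connect [| 128 |] drop;
  print_endline (Neuron.Dropout.to_string drop)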

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Embedding/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Embedding/index.html deleted file mode 100644 index 834a24cdc..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Embedding/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Embedding (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Embedding)

Module Neuron.Embedding

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_dim : int;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int -> int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
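A minimal sketch (not from the source), assuming the instantiation usually opened as Owl.Neural.S and that the two int arguments are the vocabulary size (in_dim) and the embedding dimension, in that order.

open Owl.Neural.S

let embed =
  Neuron.Embedding.create 10_000 64 (Neuron.Init.Uniform (-0.05, 0.05))

let () =
  (* sequences of length 20 feed the neuron after connection *)
  Neuron.Embedding.connect [| 20 |] embed;
  Neuron.Embedding.init embed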

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Flatten/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Flatten/index.html deleted file mode 100644 index 32f68b386..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Flatten/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Flatten (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Flatten)

Module Neuron.Flatten

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/FullyConnected/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/FullyConnected/index.html deleted file mode 100644 index 2ff2abdf4..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/FullyConnected/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -FullyConnected (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.FullyConnected)

Module Neuron.FullyConnected

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
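A minimal sketch (not from the source) of the optimiser-facing cycle described above, assuming the instantiation usually opened as Owl.Neural.S. The gradient step and learning rate below are illustrative, not the library's optimiser, and Algodiff.pack_flt is assumed from the wider Algodiff interface rather than this page.

open Owl.Neural.S
module FC = Neuron.FullyConnected

let fc = FC.create ~inputs:784 300 Neuron.Init.(Gaussian (0., 0.01))

let () =
  FC.init fc;
  let pri = FC.mkpri fc in   (* primal values of w and b *)
  let adj = FC.mkadj fc in   (* adjoint values; meaningful only after a reverse pass *)
  let lr = Algodiff.pack_flt 0.01 in
  let stepped = Array.map2 (fun p a -> Algodiff.Maths.(p - (lr * a))) pri adj in
  FC.update fc stepped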

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GRU/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GRU/index.html deleted file mode 100644 index ada5f46d4..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GRU/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GRU (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.GRU)

Module Neuron.GRU

type neuron_typ = {
  1. mutable wxz : Optimise.Algodiff.t;
  2. mutable whz : Optimise.Algodiff.t;
  3. mutable wxr : Optimise.Algodiff.t;
  4. mutable whr : Optimise.Algodiff.t;
  5. mutable wxh : Optimise.Algodiff.t;
  6. mutable whh : Optimise.Algodiff.t;
  7. mutable bz : Optimise.Algodiff.t;
  8. mutable br : Optimise.Algodiff.t;
  9. mutable bh : Optimise.Algodiff.t;
  10. mutable h : Optimise.Algodiff.t;
  11. mutable init_typ : Init.typ;
  12. mutable in_shape : int array;
  13. mutable out_shape : int array;
}

Neuron type definition.

val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
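A minimal sketch (not from the source): a GRU cell with 64 hidden units over 10 time steps of 32-dimensional inputs, assuming the instantiation usually opened as Owl.Neural.S.

open Owl.Neural.S

let gru =
  Neuron.GRU.create ~time_steps:10 ~inputs:32 64 Neuron.Init.Tanh

let () =
  Neuron.GRU.init gru;
  (* each weight matrix and bias appears once in the parameter array *)
  Printf.printf "parameters: %d\n" (Array.length (Neuron.GRU.mkpar gru))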

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GaussianDropout/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GaussianDropout/index.html deleted file mode 100644 index cbb75de06..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GaussianDropout/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GaussianDropout (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.GaussianDropout)

Module Neuron.GaussianDropout

type neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : float -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GaussianNoise/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GaussianNoise/index.html deleted file mode 100644 index f5c99b1cd..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GaussianNoise/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GaussianNoise (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.GaussianNoise)

Module Neuron.GaussianNoise

type neuron_typ = {
  1. mutable sigma : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : float -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GlobalAvgPool1D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GlobalAvgPool1D/index.html deleted file mode 100644 index 915c14215..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GlobalAvgPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalAvgPool1D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.GlobalAvgPool1D)

Module Neuron.GlobalAvgPool1D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GlobalAvgPool2D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GlobalAvgPool2D/index.html deleted file mode 100644 index e266b5573..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GlobalAvgPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalAvgPool2D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.GlobalAvgPool2D)

Module Neuron.GlobalAvgPool2D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GlobalMaxPool1D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GlobalMaxPool1D/index.html deleted file mode 100644 index 83b189ccc..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GlobalMaxPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalMaxPool1D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.GlobalMaxPool1D)

Module Neuron.GlobalMaxPool1D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GlobalMaxPool2D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GlobalMaxPool2D/index.html deleted file mode 100644 index 0491c6e71..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/GlobalMaxPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalMaxPool2D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.GlobalMaxPool2D)

Module Neuron.GlobalMaxPool2D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Init/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Init/index.html deleted file mode 100644 index 673ad6758..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Init/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Init (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Init)

Module Neuron.Init

type typ =
  1. | Uniform of float * float
  2. | Gaussian of float * float
  3. | Standard
  4. | Tanh
  5. | GlorotNormal
  6. | GlorotUniform
  7. | LecunNormal
  8. | HeNormal
  9. | Custom of (int array -> Optimise.Algodiff.t)
    (* Initialisation types *)
val calc_fans : int array -> float * float

Calculate fan-in and fan-out of weights.

val run : typ -> int array -> Optimise.Algodiff.t -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
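A minimal sketch (not from the source) of a custom initialiser built from calc_fans, assuming the instantiation usually opened as Owl.Neural.S. The scaling rule is illustrative, not the library's.

open Owl.Neural.S

let my_init : Neuron.Init.typ =
  Neuron.Init.Custom
    (fun shape ->
      let fan_in, _fan_out = Neuron.Init.calc_fans shape in
      let s = 1. /. sqrt fan_in in
      (* draw weights uniformly in [-s, s] for the requested shape *)
      Algodiff.Arr.uniform
        ~a:(Algodiff.A.float_to_elt (-. s))
        ~b:(Algodiff.A.float_to_elt s)
        shape)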

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Input/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Input/index.html deleted file mode 100644 index ee73cae97..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Input/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Input (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Input)

Module Neuron.Input

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : int array -> neuron_typ

Create the neuron.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/LSTM/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/LSTM/index.html deleted file mode 100644 index c4a148723..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/LSTM/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -LSTM (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.LSTM)

Module Neuron.LSTM

type neuron_typ = {
  1. mutable wxi : Optimise.Algodiff.t;
  2. mutable whi : Optimise.Algodiff.t;
  3. mutable wxc : Optimise.Algodiff.t;
  4. mutable whc : Optimise.Algodiff.t;
  5. mutable wxf : Optimise.Algodiff.t;
  6. mutable whf : Optimise.Algodiff.t;
  7. mutable wxo : Optimise.Algodiff.t;
  8. mutable who : Optimise.Algodiff.t;
  9. mutable bi : Optimise.Algodiff.t;
  10. mutable bc : Optimise.Algodiff.t;
  11. mutable bf : Optimise.Algodiff.t;
  12. mutable bo : Optimise.Algodiff.t;
  13. mutable c : Optimise.Algodiff.t;
  14. mutable h : Optimise.Algodiff.t;
  15. mutable init_typ : Init.typ;
  16. mutable in_shape : int array;
  17. mutable out_shape : int array;
}

Neuron type definition.

val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
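A minimal sketch (not from the source): an LSTM cell with 128 hidden units over 25 time steps of 50-dimensional inputs, assuming the instantiation usually opened as Owl.Neural.S.

open Owl.Neural.S

let lstm =
  Neuron.LSTM.create ~time_steps:25 ~inputs:50 128 Neuron.Init.GlorotUniform

let () = Neuron.LSTM.init lstm

(* deep copy, so later updates to lstm leave this snapshot untouched *)
let lstm_snapshot = Neuron.LSTM.copy lstm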

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Lambda/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Lambda/index.html deleted file mode 100644 index d4d6da9cc..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Lambda/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Lambda (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Lambda)

Module Neuron.Lambda

type neuron_typ = {
  1. mutable lambda : Optimise.Algodiff.t -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : ?out_shape:int array -> (Optimise.Algodiff.t -> Optimise.Algodiff.t) -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
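A minimal sketch (not from the source): a Lambda neuron wrapping an element-wise function built from Algodiff.Maths, assuming the instantiation usually opened as Owl.Neural.S; Maths.sigmoid is assumed from the wider Algodiff interface rather than this page. ?out_shape is omitted, leaving the shape to be inferred on connect.

open Owl.Neural.S

(* a swish-like activation: x * sigmoid x *)
let swish = Neuron.Lambda.create (fun x -> Algodiff.Maths.(x * sigmoid x))

let () = Neuron.Lambda.connect [| 256 |] swish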

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/LambdaArray/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/LambdaArray/index.html deleted file mode 100644 index 692db12cf..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/LambdaArray/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -LambdaArray (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.LambdaArray)

Module Neuron.LambdaArray

type neuron_typ = {
  1. mutable lambda : Optimise.Algodiff.t array -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : int array -> (Optimise.Algodiff.t array -> Optimise.Algodiff.t) -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Linear/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Linear/index.html deleted file mode 100644 index dd343b720..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Linear/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Linear (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Linear)

Module Neuron.Linear

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/LinearNoBias/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/LinearNoBias/index.html deleted file mode 100644 index c39eeec49..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/LinearNoBias/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -LinearNoBias (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.LinearNoBias)

Module Neuron.LinearNoBias

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_shape : int array;
  4. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Masking/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Masking/index.html deleted file mode 100644 index b795e816d..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Masking/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Masking (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Masking)

Module Neuron.Masking

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Max/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Max/index.html deleted file mode 100644 index 88d2404ac..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Max/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Max (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Max)

Module Neuron.Max

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/MaxPool1D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/MaxPool1D/index.html deleted file mode 100644 index 1d09ef18b..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/MaxPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -MaxPool1D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.MaxPool1D)

Module Neuron.MaxPool1D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : Owl_types.padding -> int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/MaxPool2D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/MaxPool2D/index.html deleted file mode 100644 index 1ba461429..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/MaxPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -MaxPool2D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.MaxPool2D)

Module Neuron.MaxPool2D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : Owl_types.padding -> int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
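A minimal sketch (not from the source): 2x2 max pooling with stride 2 and VALID padding, assuming the instantiation usually opened as Owl.Neural.S and that the two int arrays are kernel and stride, in that order.

open Owl.Neural.S

let pool = Neuron.MaxPool2D.create Owl_types.VALID [| 2; 2 |] [| 2; 2 |]

let () =
  Neuron.MaxPool2D.connect [| 28; 28; 16 |] pool;
  (* out_shape should now be [| 14; 14; 16 |] for these settings *)
  print_endline (Neuron.MaxPool2D.to_string pool)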

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Mul/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Mul/index.html deleted file mode 100644 index c223ac0e6..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Mul/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mul (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Mul)

Module Neuron.Mul

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Normalisation/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Normalisation/index.html deleted file mode 100644 index bf2be7454..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Normalisation/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Normalisation (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Normalisation)

Module Neuron.Normalisation

type neuron_typ = {
  1. mutable axis : int;
  2. mutable beta : Optimise.Algodiff.t;
  3. mutable gamma : Optimise.Algodiff.t;
  4. mutable mu : Optimise.Algodiff.t;
  5. mutable var : Optimise.Algodiff.t;
  6. mutable decay : Optimise.Algodiff.t;
  7. mutable training : bool;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}

Neuron type definition.

val create : ?training:bool -> ?decay:float -> ?mu:Optimise.Algodiff.A.arr -> ?var:Optimise.Algodiff.A.arr -> int -> neuron_typ

Create the neuron. Note that axis 0 is the batch axis.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the trainable parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update trainable parameters of the neuron, used by Optimise module.

val load_weights : neuron_typ -> Optimise.Algodiff.t array -> unit

Load both trainable and non-trainable parameters into the neuron.

val save_weights : neuron_typ -> Optimise.Algodiff.t array

Assemble both trainable and non-trainable parameters of the neuron.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
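A minimal sketch (not from the source) of the weight round trip described by the last few functions, assuming the instantiation usually opened as Owl.Neural.S and that the int argument selects the normalised axis.

open Owl.Neural.S
module N = Neuron.Normalisation

let bn = N.create ~training:true ~decay:0.9 1

let () =
  N.connect [| 32 |] bn;
  N.init bn;
  (* trainable (beta, gamma) plus non-trainable (mu, var) parameters *)
  let ws = N.save_weights bn in
  let bn' = N.copy bn in
  N.load_weights bn' ws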

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/A/Linalg/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/A/Linalg/index.html deleted file mode 100644 index 04059e77a..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/A/Mat/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/A/Mat/index.html deleted file mode 100644 index 09ada62d9..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/A/Scalar/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/A/Scalar/index.html deleted file mode 100644 index e323bb243..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/A/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/A/index.html deleted file mode 100644 index 08e4fd141..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Algodiff.A)

Module Algodiff.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
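
A minimal sketch (not from the source) that uses only functions listed above; it computes the mean of a uniformly sampled ndarray through the elt conversions, so it stays generic over the element type behind A. It assumes the instantiation usually opened as Owl.Neural.S, which exposes this module as Algodiff.A.

open Owl.Neural.S

let mean_of_uniform () =
  let open Algodiff.A in
  (* a 3x3 ndarray of values drawn uniformly from [0, 1) *)
  let x = uniform ~a:(float_to_elt 0.) ~b:(float_to_elt 1.) [| 3; 3 |] in
  elt_to_float (sum' x) /. float_of_int (numel x)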
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Arr/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Arr/index.html deleted file mode 100644 index 09f517e0c..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/index.html deleted file mode 100644 index ec58cbc9b..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Algodiff.Builder)

Module Algodiff.Builder

Ops Builder
module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t

build single input single output operations

module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t

build single input pair outputs operations

module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t

build single input triple outputs operations

module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array

build single input array output operations

module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t

build pair inputs single output operations

module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t

build array input single output operations

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html deleted file mode 100644 index 08da696bc..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html deleted file mode 100644 index fc7d59157..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html deleted file mode 100644 index bac71b555..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html deleted file mode 100644 index 481bc159a..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html deleted file mode 100644 index 3c91cb3aa..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html deleted file mode 100644 index bc59dc088..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Linalg/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Linalg/index.html deleted file mode 100644 index f38bf1dd2..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val logdet : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val chol : ?upper:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val qr : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val lq : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val svd : ?thin:bool -> t -> t * t * t

Refer to :doc:`owl_dense_ndarray_generic`

val sylvester : t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val lyapunov : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val care : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dare : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`
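
A minimal sketch (not from the source): solving a small linear system on Algodiff values with the functions listed above, assuming the instantiation usually opened as Owl.Neural.S.

open Owl.Neural.S

let solve_once () =
  let a = Algodiff.Mat.uniform 4 4 in
  let b = Algodiff.Mat.uniform 4 1 in
  let x = Algodiff.Linalg.linsolve a b in
  (* the residual a *@ x - b should be close to zero *)
  Algodiff.Maths.(a *@ x - b)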

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Mat/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Mat/index.html deleted file mode 100644 index 738df55ea..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Maths/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Maths/index.html deleted file mode 100644 index cd33726a8..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (-) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (**) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val add : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sub : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mul : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val div : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val kron : t -> t -> t

Refer to :doc:`owl_dense_matrix_generic`

val dot : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pow : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val min2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cross_entropy : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val neg : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val abs : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val signum : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val floor : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val ceil : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val round : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqr : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqrt : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log2 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log10 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val exp : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum_reduce : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mean : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val swap : int -> int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l1norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm_sqr' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sigmoid : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val relu : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dawsn : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softplus : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softsign : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softmax : ?axis:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val reshape : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val flatten : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_item : t -> int -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_row : t -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val concat : axis:int -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val split : axis:int -> int array -> t -> t array

Refer to :doc:`owl_dense_ndarray_generic`

val of_arrays : t array array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val to_arrays : t -> t array array

Refer to :doc:`owl_dense_ndarray_generic`

val concatenate : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val stack : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_slice : int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_slice : int list list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_fancy : Owl_types.index list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_fancy : Owl_types.index list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diag : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diagm : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val trace : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val triu : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tril : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`
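A minimal sketch of the operator interface, assuming a concrete instance such as Owl.Algodiff.D; the shapes are illustrative:
open Owl.Algodiff.D
let x = Mat.uniform 3 3
(* mix element-wise operators, the matrix product *@ and scalar lifting via _f *)
let y = Maths.((sin x *@ x) + (_f 2. * sqr x))
let () = Mat.print y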

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/NN/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/NN/index.html deleted file mode 100644 index 7ebb4e767..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv1d : - ?padding:Owl_types.padding -> - t -> - t -> - int array -> - int array -> - t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv2d : - ?padding:Owl_types.padding -> - t -> - t -> - int array -> - int array -> - t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv3d : - ?padding:Owl_types.padding -> - t -> - t -> - int array -> - int array -> - t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val upsampling2d : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pad : ?v:A.elt -> int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`
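A minimal sketch of the convolution and pooling primitives, assuming a concrete instance such as Owl.Algodiff.D; the tensor layout is batch-height-width-channel and all sizes are illustrative:
open Owl.Algodiff.D
let x = pack_arr Owl.Arr.(uniform [| 10; 28; 28; 1 |])   (* 10 images of 28 x 28 x 1    *)
let w = pack_arr Owl.Arr.(uniform [| 3; 3; 1; 16 |])     (* 3 x 3 kernel, 1 -> 16 maps  *)
let y = NN.conv2d ~padding:Owl_types.SAME x w [| 1; 1 |]
let z = NN.max_pool2d Owl_types.VALID y [| 2; 2 |] [| 2; 2 |]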

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/index.html deleted file mode 100644 index bc26ea79e..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Algodiff/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Algodiff (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Algodiff)

Module Optimise.Algodiff

include Owl_algodiff_core_sig.Sig
Type definition
include Owl_algodiff_types_sig.Sig with type elt := A.elt and type arr := A.arr
type t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
Core functions
val tag : unit -> int

TODO

val primal : t -> t

TODO

val primal' : t -> t

TODO

val zero : t -> t

TODO

val reset_zero : t -> t

TODO

val tangent : t -> t

TODO

val adjref : t -> t Stdlib.ref

TODO

val adjval : t -> t

TODO

val shape : t -> int array

TODO

val is_float : t -> bool

TODO

val is_arr : t -> bool

TODO

val row_num : t -> int

number of rows

val col_num : t -> int

number of columns

val numel : t -> int

number of elements

val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t

other functions, without tracking gradient

val clip_by_l2norm : A.elt -> t -> t

other functions, without tracking gradient

val copy_primal' : t -> t

TODO

val tile : t -> int array -> t

TODO

val repeat : t -> int array -> t

TODO

val pack_elt : A.elt -> t

convert from elt type to t type.

val unpack_elt : t -> A.elt

convert from t type to elt type.

val pack_flt : float -> t

convert from float type to t type.

val _f : float -> t

A shortcut function for F A.(float_to_elt x).

val unpack_flt : t -> float

convert from t type to float type.

val pack_arr : A.arr -> t

convert from arr type to t type.

val unpack_arr : t -> A.arr

convert from t type to arr type.
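A small sketch of moving values across the AD boundary, assuming Owl.Algodiff.D:
open Owl.Algodiff.D
let s  = pack_flt 3.14                        (* float -> t, same as _f 3.14 *)
let v  = unpack_flt s                         (* t     -> float              *)
let a  = pack_arr Owl.Arr.(ones [| 2; 2 |])   (* arr   -> t                  *)
let a' = unpack_arr a                         (* t     -> arr                *)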

val deep_info : t -> string

TODO

val type_info : t -> string

TODO

val error_binop : string -> t -> t -> 'a

TODO

val error_uniop : string -> t -> 'a

TODO

val make_forward : t -> t -> int -> t

TODO

val make_reverse : t -> int -> t

TODO

val reverse_prop : t -> t -> unit

TODO

val diff : (t -> t) -> t -> t

diff f x returns the exact derivative of a function f : scalar -> scalar at point x. Simply calling diff f will return its derivative function g of the same type, i.e. g : scalar -> scalar.

Calling this function repeatedly gives you higher-order derivatives of f, i.e. f |> diff |> diff |> diff |> ...

val diff' : (t -> t) -> t -> t * t

similar to diff, but return (f x, diff f x).
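A minimal sketch, assuming a concrete instance such as Owl.Algodiff.D:
open Owl.Algodiff.D
let f x = Maths.(sin x * x)
(* first derivative of f at x = 1 *)
let d1 = diff f (F 1.)
(* value and derivative in one call *)
let fx, d1' = diff' f (F 1.)
(* third derivative, obtained by composing diff *)
let d3 = (f |> diff |> diff |> diff) (F 1.)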

val grad : (t -> t) -> t -> t

gradient of f : (vector -> scalar) at x, reverse ad.

val grad' : (t -> t) -> t -> t * t

similar to grad, but return (f x, grad f x).
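A sketch of reverse-mode gradients, again assuming Owl.Algodiff.D:
open Owl.Algodiff.D
(* sum of squares of a row vector; its gradient is 2 * x *)
let g x = Maths.(l2norm_sqr' x)
let x = Mat.uniform 1 5
let dx = grad g x
let fx, dx' = grad' g x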

val jacobian : (t -> t) -> t -> t

jacobian of f : (vector -> vector) at x, both x and y are row vectors.

val jacobian' : (t -> t) -> t -> t * t

similar to jacobian, but return (f x, jacobian f x)

val jacobianv : (t -> t) -> t -> t -> t

jacobian vector product of f : (vector -> vector) at x along v, forward ad. Namely, it calculates (jacobian f x) v.

val jacobianv' : (t -> t) -> t -> t -> t * t

similar to jacobianv, but return (f x, jacobianv f x v)

val jacobianTv : (t -> t) -> t -> t -> t

transposed jacobian vector product of f : (vector -> vector) at x along v, backward ad. Namely, it calculates transpose (jacobian f x) v.

val jacobianTv' : (t -> t) -> t -> t -> t * t

similar to jacobianTv, but return (f x, jacobianTv f x v)
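A sketch contrasting the forward-mode and reverse-mode products, assuming Owl.Algodiff.D; the weight matrix is illustrative:
open Owl.Algodiff.D
let w = Mat.uniform 3 2
let h x = Maths.(x *@ w)        (* maps a 1 x 3 row vector to a 1 x 2 row vector *)
let x = Mat.uniform 1 3
let v = Mat.uniform 1 3         (* tangent, same shape as x            *)
let u = Mat.uniform 1 2         (* cotangent, same shape as the output *)
let jv  = jacobianv  h x v      (* (jacobian h x) applied to v, forward mode          *)
let jtv = jacobianTv h x u      (* transposed Jacobian applied to u, reverse mode     *)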

val hessian : (t -> t) -> t -> t

hessian of f : (scalar -> scalar) at x.

val hessian' : (t -> t) -> t -> t * t

similar to hessian, but return (f x, hessian f x)

val hessianv : (t -> t) -> t -> t -> t

hessian vector product of f : (scalar -> scalar) at x along v. Namely, it calculates (hessian f x) v.

val hessianv' : (t -> t) -> t -> t -> t * t

similar to hessianv, but return (f x, hessianv f x v).

val laplacian : (t -> t) -> t -> t

laplacian of f : (scalar -> scalar) at x.

val laplacian' : (t -> t) -> t -> t * t

similar to laplacian, but return (f x, laplacian f x).

val gradhessian : (t -> t) -> t -> t * t

return (grad f x, hessian f x), f : (scalar -> scalar)

val gradhessian' : (t -> t) -> t -> t * t * t

return (f x, grad f x, hessian f x)

val gradhessianv : (t -> t) -> t -> t -> t * t

return (grad f x v, hessian f x v)

val gradhessianv' : (t -> t) -> t -> t -> t * t * t

return (f x, grad f x v, hessian f x v)

include Owl_algodiff_ops_sig.Sig - with type t := t - and type elt := A.elt - and type arr := A.arr - and type op := op
module Builder : - Owl_algodiff_ops_builder_sig.Sig - with type t := t - and type elt := A.elt - and type arr := A.arr - and type op := op
Supported Maths functions
module Maths : sig ... end
Supported Linalg functions
module Linalg : sig ... end
Supported Neural Network functions
module NN : sig ... end
Supported Mat functions
module Mat : sig ... end
Supported Arr functions
module Arr : sig ... end
Helper functions
include Owl_algodiff_graph_convert_sig.Sig with type t := t
val to_trace : t list -> string

to_trace [t0; t1; ...] outputs the trace of the computation graph to the terminal in a human-readable format.

val to_dot : t list -> string

to_dot [t0; t1; ...] outputs the trace of the computation graph in the dot file format, which you can then visualise with other tools such as Graphviz.
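A small sketch of dumping a computation graph, assuming Owl.Algodiff.D:
open Owl.Algodiff.D
let x = make_reverse (Mat.uniform 3 3) (tag ())   (* track x in the graph *)
let y = Maths.(sum' (sin x *@ x))
let () =
  let oc = open_out "algodiff_graph.dot" in
  output_string oc (to_dot [ y ]);
  close_out oc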

val pp_num : Stdlib.Format.formatter -> t -> unit

pp_num t pretty prints the abstract number used in Algodiff.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Batch/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Batch/index.html deleted file mode 100644 index 68990a947..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Batch/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Batch (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Batch)

Module Optimise.Batch

Batch module

type typ =
  1. | Full
  2. | Mini of int
  3. | Sample of int
  4. | Stochastic

Types of batches.

val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t

Execute the computations defined in module typ.

val batches : typ -> Algodiff.t -> int

Return the total number of batches given a batch typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Checkpoint/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Checkpoint/index.html deleted file mode 100644 index 302f82bac..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Checkpoint/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Checkpoint (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Checkpoint)

Module Optimise.Checkpoint

Checkpoint module

type state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}

Type definition of checkpoint

type typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of state -> unit
  4. | None

Types of checkpoint.

val init_state : int -> float -> state

init_state batches_per_epoch epochs initialises a state by specifying the number of batches per epoch and the number of epochs in total.

val default_checkpoint_fun : (string -> 'a) -> 'a

This function is used for saving intermediate files during optimisation.

val print_state_info : state -> unit

Print out the detailed information of the current state.

val print_summary : state -> unit

Print out the summary of current state.

val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Clipping/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Clipping/index.html deleted file mode 100644 index 49be118ec..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Clipping/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Clipping (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Clipping)

Module Optimise.Clipping

Clipping module

type typ =
  1. | L2norm of float
  2. | Value of float * float
  3. | None

Types of clipping functions.

val run : typ -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Gradient/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Gradient/index.html deleted file mode 100644 index d92e66f50..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Gradient/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -Gradient (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Gradient)

Module Optimise.Gradient

Gradient module

type typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton

Types of gradient function.

val run : - typ -> - (Algodiff.t -> Algodiff.t) -> - Algodiff.t -> - Algodiff.t -> - Algodiff.t -> - Algodiff.t -> - Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Learning_Rate/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Learning_Rate/index.html deleted file mode 100644 index d0caa230b..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Learning_Rate/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Learning_Rate (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Learning_Rate)

Module Optimise.Learning_Rate

Strategies for learning rate update

type typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array

Representation of learning rate update strategies. Possible values include:

  • Adam (alpha, beta1, beta2), see ref for parameter meaning
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array

Update the cache of gradients.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Loss/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Loss/index.html deleted file mode 100644 index 13f7140b5..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Loss/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Loss (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Loss)

Module Optimise.Loss

Loss module

type typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of Algodiff.t -> Algodiff.t -> Algodiff.t

Types of loss functions.

val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Momentum/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Momentum/index.html deleted file mode 100644 index d85c188c2..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Momentum/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Momentum (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Momentum)

Module Optimise.Momentum

Momentum module

type typ =
  1. | Standard of float
  2. | Nesterov of float
  3. | None

Types of momentum functions.

val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Params/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Params/index.html deleted file mode 100644 index 9d5539656..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Params/index.html +++ /dev/null @@ -1,14 +0,0 @@ - -Params (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Params)

Module Optimise.Params

Params module

type typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}

Type definition of parameter.

val default : unit -> typ

Create module typ with default values.

val config : - ?batch:Batch.typ -> - ?gradient:Gradient.typ -> - ?loss:Loss.typ -> - ?learning_rate:Learning_Rate.typ -> - ?regularisation:Regularisation.typ -> - ?momentum:Momentum.typ -> - ?clipping:Clipping.typ -> - ?stopping:Stopping.typ -> - ?checkpoint:Checkpoint.typ -> - ?verbosity:bool -> - float -> - typ

This function creates a parameter object with many configurations.
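A sketch of a typical configuration, written relative to this Optimise module (i.e. assuming a concrete instance of the surrounding Optimise functor is opened); the hyper-parameter values are illustrative:
let params =
  Params.config
    ~batch:(Batch.Mini 128)
    ~learning_rate:(Learning_Rate.Adam (0.001, 0.9, 0.999))
    ~loss:Loss.Cross_entropy
    ~momentum:(Momentum.Standard 0.9)
    ~verbosity:true
    10.                                   (* number of epochs *)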

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Regularisation/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Regularisation/index.html deleted file mode 100644 index 4b4f616cb..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Regularisation/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Regularisation (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Regularisation)

Module Optimise.Regularisation

Regularisation module

type typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None

Types of regularisation functions.

val run : typ -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Stopping/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Stopping/index.html deleted file mode 100644 index cfbd86a96..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Stopping/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Stopping (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Stopping)

Module Optimise.Stopping

Stopping module

type typ =
  1. | Const of float
  2. | Early of int * int
  3. | None

Types of stopping functions.

val run : typ -> float -> bool

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Utils/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Utils/index.html deleted file mode 100644 index 434ec0450..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise.Utils)

Module Optimise.Utils

Utils module

val sample_num : Algodiff.t -> int

Return the total number of samples in the passed-in ndarray.

val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t

draw_samples x y draws samples from both x (observations) and y (labels). The samples will be drawn along axis 0, so x and y must agree along axis 0.

val get_chunk : - Algodiff.t -> - Algodiff.t -> - int -> - int -> - Algodiff.t * Algodiff.t

get_chunk x y i c gets a continuous chunk of c samples from position i from x (observations) and y (labels).

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/index.html deleted file mode 100644 index d5f65bfd2..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Optimise/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -Optimise (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Optimise)

Module Neuron.Optimise

module Utils : sig ... end

Utils module

module Learning_Rate : sig ... end

Strategies for learning rate update

module Batch : sig ... end

Batch module

module Loss : sig ... end

Loss module

module Gradient : sig ... end

Gradient module

module Momentum : sig ... end

Momentum module

module Regularisation : sig ... end

Regularisation module

module Clipping : sig ... end

Clipping module

module Stopping : sig ... end

Stopping module

module Checkpoint : sig ... end

Checkpoint module

module Params : sig ... end

Params module

Core functions
val minimise_weight : - ?state:Checkpoint.state -> - Params.typ -> - (Algodiff.t -> Algodiff.t -> Algodiff.t) -> - Algodiff.t -> - Algodiff.t -> - Algodiff.t -> - Checkpoint.state * Algodiff.t

This function minimises the weight w of the passed-in function f.

* f is a function f : w -> x -> y. * w is a row vector but y can have any shape.

val minimise_network : - ?state:Checkpoint.state -> - Params.typ -> - (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> - (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> - (Algodiff.t array array -> unit) -> - (string -> unit) -> - Algodiff.t -> - Algodiff.t -> - Checkpoint.state

This function is specifically designed for minimising the weights in a neural network of graph structure. In Owl's earlier versions, the functions in the regression module were actually implemented using this function.

val minimise_fun : - ?state:Checkpoint.state -> - Params.typ -> - (Algodiff.t -> Algodiff.t) -> - Algodiff.t -> - Checkpoint.state * Algodiff.t

This function minimises f : x -> y w.r.t x.

x is an ndarray, and y is a scalar value.
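A sketch of minimising a simple scalar-valued function, written relative to this Optimise module (assuming a concrete instance of the Optimise functor is opened); the objective and sizes are illustrative:
(* f maps a 1 x 5 row vector to the sum of its squares; the minimum is at 0 *)
let f x = Algodiff.Maths.(l2norm_sqr' x)
let x0 = Algodiff.Mat.uniform 1 5
let params = Params.config ~learning_rate:(Learning_Rate.Const 0.1) 50.
let _state, x_min = minimise_fun params f x0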

val minimise_compiled_network : - ?state:Checkpoint.state -> - Params.typ -> - (Algodiff.t -> Algodiff.t -> Algodiff.t) -> - (unit -> unit) -> - (string -> unit) -> - Algodiff.t -> - Algodiff.t -> - Checkpoint.state

TODO

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Padding1D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Padding1D/index.html deleted file mode 100644 index f8ad58ab1..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Padding1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding1D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Padding1D)

Module Neuron.Padding1D

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Padding2D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Padding2D/index.html deleted file mode 100644 index e28946a84..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Padding2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding2D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Padding2D)

Module Neuron.Padding2D

type neuron_typ = {
  1. mutable padding : int array array;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : int array array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Padding3D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Padding3D/index.html deleted file mode 100644 index b08a43529..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Padding3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding3D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Padding3D)

Module Neuron.Padding3D

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Recurrent/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Recurrent/index.html deleted file mode 100644 index 96528d85e..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Recurrent/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -Recurrent (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Recurrent)

Module Neuron.Recurrent

type neuron_typ = {
  1. mutable whh : Optimise.Algodiff.t;
  2. mutable wxh : Optimise.Algodiff.t;
  3. mutable why : Optimise.Algodiff.t;
  4. mutable bh : Optimise.Algodiff.t;
  5. mutable by : Optimise.Algodiff.t;
  6. mutable h : Optimise.Algodiff.t;
  7. mutable hiddens : int;
  8. mutable act : Activation.typ;
  9. mutable init_typ : Init.typ;
  10. mutable in_shape : int array;
  11. mutable out_shape : int array;
}

Neuron type definition.

val create : - ?time_steps:int -> - ?inputs:int -> - int -> - int -> - Activation.typ -> - Init.typ -> - neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjacent values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Reshape/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Reshape/index.html deleted file mode 100644 index c4c43b007..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Reshape/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Reshape (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Reshape)

Module Neuron.Reshape

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Slice/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Slice/index.html deleted file mode 100644 index 06cb295a7..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/Slice/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Slice (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.Slice)

Module Neuron.Slice

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
  3. mutable slice : int list list;
}

Neuron type definition.

val create : int list list -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/TransposeConv1D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/TransposeConv1D/index.html deleted file mode 100644 index cf0d07e54..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/TransposeConv1D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -TransposeConv1D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.TransposeConv1D)

Module Neuron.TransposeConv1D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjacent values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/TransposeConv2D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/TransposeConv2D/index.html deleted file mode 100644 index dbc479093..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/TransposeConv2D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -TransposeConv2D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.TransposeConv2D)

Module Neuron.TransposeConv2D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjacent values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/TransposeConv3D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/TransposeConv3D/index.html deleted file mode 100644 index 101b2f3c0..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/TransposeConv3D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -TransposeConv3D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.TransposeConv3D)

Module Neuron.TransposeConv3D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjacent values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/UpSampling1D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/UpSampling1D/index.html deleted file mode 100644 index 794819850..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/UpSampling1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling1D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.UpSampling1D)

Module Neuron.UpSampling1D

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/UpSampling2D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/UpSampling2D/index.html deleted file mode 100644 index d70f6daa9..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/UpSampling2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling2D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.UpSampling2D)

Module Neuron.UpSampling2D

type neuron_typ = {
  1. mutable size : int array;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/UpSampling3D/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/UpSampling3D/index.html deleted file mode 100644 index 376654721..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/UpSampling3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling3D (owl-base.Owl_neural_generic.Flatten.Graph.Neuron.UpSampling3D)

Module Neuron.UpSampling3D

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/index.html deleted file mode 100644 index 8b89afbc2..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/Neuron/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Neuron (owl-base.Owl_neural_generic.Flatten.Graph.Neuron)

Module Graph.Neuron

Init neuron
module Init : sig ... end
Input neuron
module Input : sig ... end
Activation neuron
module Activation : sig ... end
Linear neuron
module Linear : sig ... end
LinearNoBias neuron
module LinearNoBias : sig ... end
Recurrent neuron
module Recurrent : sig ... end
LSTM neuron
module LSTM : sig ... end
GRU neuron
module GRU : sig ... end
Conv1D neuron
module Conv1D : sig ... end
Conv2D neuron
module Conv2D : sig ... end
Conv3D neuron
module Conv3D : sig ... end
DilatedConv1D neuron
module DilatedConv1D : sig ... end
DilatedConv2D neuron
module DilatedConv2D : sig ... end
DilatedConv3D neuron
module DilatedConv3D : sig ... end
TransposeConv1D neuron
module TransposeConv1D : sig ... end
TransposeConv2D neuron
module TransposeConv2D : sig ... end
TransposeConv3D neuron
module TransposeConv3D : sig ... end
FullyConnected neuron
module FullyConnected : sig ... end
MaxPool1D neuron
module MaxPool1D : sig ... end
MaxPool2D neuron
module MaxPool2D : sig ... end
AvgPool1D neuron
module AvgPool1D : sig ... end
AvgPool2D neuron
module AvgPool2D : sig ... end
GlobalMaxPool1D neuron
module GlobalMaxPool1D : sig ... end
GlobalMaxPool2D neuron
module GlobalMaxPool2D : sig ... end
GlobalAvgPool1D neuron
module GlobalAvgPool1D : sig ... end
GlobalAvgPool2D neuron
module GlobalAvgPool2D : sig ... end
UpSampling1D neuron
module UpSampling1D : sig ... end
UpSampling2D neuron
module UpSampling2D : sig ... end
UpSampling3D neuron
module UpSampling3D : sig ... end
Padding1D neuron
module Padding1D : sig ... end
Padding2D neuron
module Padding2D : sig ... end
Padding3D neuron
module Padding3D : sig ... end
Lambda neuron
module Lambda : sig ... end
LambdaArray neuron
module LambdaArray : sig ... end
Dropout neuron
module Dropout : sig ... end
Reshape neuron
module Reshape : sig ... end
Flatten neuron
module Flatten : sig ... end
Slice neuron
module Slice : sig ... end
Add neuron
module Add : sig ... end
Mul neuron
module Mul : sig ... end
Dot neuron
module Dot : sig ... end
Max neuron
module Max : sig ... end
Average neuron
module Average : sig ... end
Concatenate neuron
module Concatenate : sig ... end
Normalisation neuron
module Normalisation : sig ... end
GaussianNoise neuron
module GaussianNoise : sig ... end
GaussianDropout neuron
module GaussianDropout : sig ... end
AlphaDropout neuron
module AlphaDropout : sig ... end
Embedding neuron
module Embedding : sig ... end
Masking neuron
module Masking : sig ... end
Core functions
type neuron =
  1. | Input of Input.neuron_typ
  2. | Linear of Linear.neuron_typ
  3. | LinearNoBias of LinearNoBias.neuron_typ
  4. | Embedding of Embedding.neuron_typ
  5. | LSTM of LSTM.neuron_typ
  6. | GRU of GRU.neuron_typ
  7. | Recurrent of Recurrent.neuron_typ
  8. | Conv1D of Conv1D.neuron_typ
  9. | Conv2D of Conv2D.neuron_typ
  10. | Conv3D of Conv3D.neuron_typ
  11. | DilatedConv1D of DilatedConv1D.neuron_typ
  12. | DilatedConv2D of DilatedConv2D.neuron_typ
  13. | DilatedConv3D of DilatedConv3D.neuron_typ
  14. | TransposeConv1D of TransposeConv1D.neuron_typ
  15. | TransposeConv2D of TransposeConv2D.neuron_typ
  16. | TransposeConv3D of TransposeConv3D.neuron_typ
  17. | FullyConnected of FullyConnected.neuron_typ
  18. | MaxPool1D of MaxPool1D.neuron_typ
  19. | MaxPool2D of MaxPool2D.neuron_typ
  20. | AvgPool1D of AvgPool1D.neuron_typ
  21. | AvgPool2D of AvgPool2D.neuron_typ
  22. | GlobalMaxPool1D of GlobalMaxPool1D.neuron_typ
  23. | GlobalMaxPool2D of GlobalMaxPool2D.neuron_typ
  24. | GlobalAvgPool1D of GlobalAvgPool1D.neuron_typ
  25. | GlobalAvgPool2D of GlobalAvgPool2D.neuron_typ
  26. | UpSampling2D of UpSampling2D.neuron_typ
  27. | Padding2D of Padding2D.neuron_typ
  28. | Dropout of Dropout.neuron_typ
  29. | Reshape of Reshape.neuron_typ
  30. | Flatten of Flatten.neuron_typ
  31. | Slice of Slice.neuron_typ
  32. | Lambda of Lambda.neuron_typ
  33. | LambdaArray of LambdaArray.neuron_typ
  34. | Activation of Activation.neuron_typ
  35. | GaussianNoise of GaussianNoise.neuron_typ
  36. | GaussianDropout of GaussianDropout.neuron_typ
  37. | AlphaDropout of AlphaDropout.neuron_typ
  38. | Normalisation of Normalisation.neuron_typ
  39. | Add of Add.neuron_typ
  40. | Mul of Mul.neuron_typ
  41. | Dot of Dot.neuron_typ
  42. | Max of Max.neuron_typ
  43. | Average of Average.neuron_typ
  44. | Concatenate of Concatenate.neuron_typ
    (*

    Types of neuron.

    *)
val get_in_out_shape : neuron -> int array * int array

Get both input and output shapes of a neuron.

val get_in_shape : neuron -> int array

Get the input shape of a neuron.

val get_out_shape : neuron -> int array

Get the output shape of a neuron.

val connect : int array array -> neuron -> unit

Connect this neuron to others in a neural network.

val init : neuron -> unit

Initialise the neuron and its parameters.

val reset : neuron -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron -> Optimise.Algodiff.t array

Assemble all the trainable parameters in an array, used by Optimise module.

val mkpri : neuron -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron -> Optimise.Algodiff.t array

Assemble all the adjacent values in an array, used by Optimise module.

val update : neuron -> Optimise.Algodiff.t array -> unit

Update trainable parameters in a neuron, used by Optimise module.

val load_weights : neuron -> Optimise.Algodiff.t array -> unit

Load both trainable and non-trainable parameters into the neuron.

val save_weights : neuron -> Optimise.Algodiff.t array

Assemble both trainable and non-trainable parameters of the neuron.

val copy : neuron -> neuron

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : neuron -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/index.html b/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/index.html deleted file mode 100644 index 96abb3e93..000000000 --- a/owl-base/Owl_neural_generic/Flatten/argument-1-Graph/index.html +++ /dev/null @@ -1,243 +0,0 @@ - -Graph (owl-base.Owl_neural_generic.Flatten.Graph)

Parameter Flatten.Graph

Type definition
type node = {
  1. mutable name : string;
  2. mutable prev : node array;
  3. mutable next : node array;
  4. mutable neuron : Neuron.neuron;
  5. mutable output : Neuron.Optimise.Algodiff.t option;
  6. mutable network : network;
  7. mutable train : bool;
}
and network = {
  1. mutable nnid : string;
  2. mutable size : int;
  3. mutable roots : node array;
  4. mutable outputs : node array;
  5. mutable topo : node array;
}

Type definition of a node and a neural network.

Manipulate networks
val make_network : ?nnid:string -> int -> node array -> node array -> network

Create an empty neural network.

val make_node : - ?name:string -> - ?train:bool -> - node array -> - node array -> - Neuron.neuron -> - Neuron.Optimise.Algodiff.t option -> - network -> - node

Create a node in a neural network.

val get_roots : network -> node array

Get the roots of the neural network.

val get_outputs : network -> node array

Get the outputs of the neural network.

val get_node : network -> string -> node

Get a node in a network with the given name.

val get_network : ?name:string -> node -> network

Get the neural network that the given node is associated with.

val outputs : ?name:string -> node array -> network

Get the neural network associated with the given output nodes.

val get_network_name : network -> string

get_network_name n returns the name of the network n.

val set_network_name : network -> string -> unit

set_network_name n s sets the name of the network n to s.

val collect_output : node array -> Neuron.Optimise.Algodiff.t array

Collect the output values of given nodes.

val connect_pair : node -> node -> unit

Connect two nodes in a neural network.

val connect_to_parents : node array -> node -> unit

Connect a node to a list of parents.

val add_node : - ?act_typ:Neuron.Activation.typ -> - network -> - node array -> - node -> - node

Add a node to the given network.

val input_shape : network -> int array

Get input shape of a network (without batch dimension), i.e. shape of input neuron.

val input_shapes : network -> int array array

Get input shapes of a network (without batch dimension), i.e. shape of input neurons.

Interface to optimisation engine
val init : network -> unit

Initialise the network.

val reset : network -> unit

Reset the network, i.e. all the parameters in the neurons.

val mktag : int -> network -> unit

Tag the neurons, used by Algodiff module.

val mkpar : network -> Neuron.Optimise.Algodiff.t array array

Collect the parameters of neurons, used by Optimise module.

val mkpri : network -> Neuron.Optimise.Algodiff.t array array

Collect the primal values of neurons, used by Optimise module.

val mkadj : network -> Neuron.Optimise.Algodiff.t array array

Collect the adjacent values of neurons, used by Optimise module.

val update : network -> Neuron.Optimise.Algodiff.t array array -> unit

Update the parameters of neurons, used by Optimise module.

Execute the computations in all the neurons in a network with the given input.

val run_inputs : - Neuron.Optimise.Algodiff.t array -> - network -> - Neuron.Optimise.Algodiff.t array

Execute the computations in all the neurons in a network with the given inputs.

Run the forward pass of a network.

val forward_inputs : - network -> - Neuron.Optimise.Algodiff.t array -> - Neuron.Optimise.Algodiff.t array * Neuron.Optimise.Algodiff.t array array

Run the forward pass of a network (multi-input/output version).

val backward : - network -> - Neuron.Optimise.Algodiff.t -> - Neuron.Optimise.Algodiff.t array array - * Neuron.Optimise.Algodiff.t array array

Run the backward pass of a network.

val copy : network -> network

Make a deep copy of the given network.

Make a deep copy of the given network, excluding the neurons marked with training = true.

val model_inputs : - network -> - Neuron.Optimise.Algodiff.A.arr array -> - Neuron.Optimise.Algodiff.A.arr array

Make a deep copy of the given network, excluding the neurons marked with training = true.

Create Neurons
val input : ?name:string -> int array -> node

input shape creates an input node for input data. Note that if your network has multiple inputs, you should use inputs instead.

Arguments: * shape: shape of input data.

val inputs : ?names:string array -> int array array -> node array

inputs shapes creates an array of input nodes for input data.

Arguments: * shapes: array of shapes of input data.

val activation : ?name:string -> Neuron.Activation.typ -> node -> node

Applies an activation function to an output.

Arguments: * activation: name of activation function to use.

val linear : - ?name:string -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int -> - node -> - node

linear ?act_typ units node adds the regular densely-connected NN node to node.

Arguments: * units: Positive integer, dimensionality of the output space. * act_typ: Activation function to use.
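A sketch of chaining these constructors into a small multilayer perceptron, assuming a concrete instantiation such as Owl.Neural.D.Graph from the main owl package; the layer sizes and activation choices are illustrative:
open Owl.Neural.D.Graph
let nn =
  input [| 784 |]
  |> linear 256 ~act_typ:Neuron.Activation.Relu
  |> linear 10 ~act_typ:(Neuron.Activation.Softmax 1)
  |> get_network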

val linear_nobias : - ?name:string -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int -> - node -> - node

Similar to linear, but does not use the bias vector.

val embedding : - ?name:string -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int -> - int -> - node -> - node

Create a node for embedding neuron.

val recurrent : - ?name:string -> - ?init_typ:Neuron.Init.typ -> - act_typ:Neuron.Activation.typ -> - int -> - int -> - node -> - node

Create a node for recurrent neuron.

val lstm : ?name:string -> ?init_typ:Neuron.Init.typ -> int -> node -> node

lstm units node adds an LSTM node on previous node.

Arguments: * units: Positive integer, dimensionality of the output space.

val gru : ?name:string -> ?init_typ:Neuron.Init.typ -> int -> node -> node

gru units node adds a Gated Recurrent Unit node on previous node.

Arguments: * units: Positive integer, dimensionality of the output space.
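A sketch of the recurrent constructors in context; the vocabulary size, sequence length and the get_network helper are assumptions, not part of this listing:

  (* embed token ids, run an LSTM over the sequence, classify into 2 classes *)
  let nn =
    input [| 40 |]
    |> embedding 10000 16
    |> lstm 64
    |> linear 2 ~act_typ:(Neuron.Activation.Softmax 1)
    |> get_network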

val conv1d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node

conv1d kernel stride node adds a 1D convolution node (e.g. temporal convolution) on previous node.

Arguments: * kernel: int array consists of h, i, o. h specifies the dimension of the 1D convolution window. i and o are the dimensionalities of the input and output space. * stride: int array of 1 integer.

val conv2d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node

conv2d kernel stride node adds a 2D convolution node (e.g. spatial convolution over images) on previous node.

Arguments: * kernel: int array consists of w, h, i, o. w and h specify the width and height of the 2D convolution window. i and o are the dimensionality of the input and output space. * stride: int array of 2 integers.

val conv3d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node

conv3d kernel stride node adds a 3D convolution node (e.g. spatial convolution over volumes) on previous node.

Arguments: * kernel: int array consists of w, h, d, i, o. w, h, and d specify the three dimensions of the 3D convolution window. i and o are the dimensionality of the input and output space. * stride: int array of 3 integers.

val dilated_conv1d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> int array -> node -> node

dilated_conv1d kernel stride rate node adds a 1D dilated convolution node (e.g. temporal convolution) on previous node.

Arguments: * kernel: int array consists of h, i, o. h specifies the dimension of the 1D convolution window. i and o are the dimensionalities of the input and output space. * stride: int array of 1 integer. * rate: int array of 1 integer.

val dilated_conv2d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> int array -> node -> node

dilated_conv2d kernel stride rate node adds a 2D dilated convolution node (e.g. spatial convolution over images) on previous node.

Arguments: * kernel: int array consists of w, h, i, o. w and h specify the width and height of the 2D convolution window. i and o are the dimensionality of the input and output space. * stride: int array of 2 integers. * rate: int array of 2 integers.

val dilated_conv3d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> int array -> node -> node

dilated_conv3d kernel stride rate node adds a 3D dilated convolution node (e.g. spatial convolution over volumes) on previous node.

Arguments: * kernel: int array consists of w, h, d, i, o. w, h, and d specify the three dimensions of the 3D convolution window. i and o are the dimensionality of the input and output space. * stride: int array of 3 integers. * rate: int array of 3 integers.

val transpose_conv1d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node

transpose_conv1d kernel stride node adds a 1D transpose convolution node (e.g. temporal convolution) on previous node.

Arguments: * kernel: int array consists of h, i, o. h specifies the dimension of the 1D convolution window. i and o are the dimensionalities of the input and output space. * stride: int array of 1 integer.

val transpose_conv2d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node

transpose_conv2d kernel stride node adds a 2D transpose convolution node on previous node.

Arguments: * kernel: int array consists of w, h, i, o. w and h specify the width and height of the 2D convolution window. i and o are the dimensionality of the input and output space. * stride: int array of 2 integers.

val transpose_conv3d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node

transpose_conv3d kernel stride node adds a 3D transpose convolution node (e.g. spatial convolution over volumes) on previous node.

Arguments: * kernel: int array consists of w, h, d, i, o. w, h, and d specify the three dimensions of the 3D convolution window. i and o are the dimensionality of the input and output space. * stride: int array of 3 integers.

val fully_connected : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> node -> node

fully_connected outputs node adds a fully connected node to node.

Arguments: * outputs: integer, the number of output units in the node.

val max_pool1d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node

max_pool1d ~padding ~act_typ pool_size stride node adds a max pooling operation for temporal data to node.

Arguments: * pool_size: Array of one integer, size of the max pooling windows. * stride: Array of one integer, factor by which to downscale.

val max_pool2d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node

max_pool2d ~padding ~act_typ pool_size stride node adds a max pooling operation for spatial data to node.

Arguments: * pool_size: Array of 2 integers, size of the max pooling windows. * stride: Array of 2 integers, factor by which to downscale.
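A sketch combining the convolution and pooling constructors above; the kernel follows the [|w; h; i; o|] layout described earlier, and flatten (documented below) and get_network are assumed helpers:

  (* 28x28x1 input, one conv block, then a dense classifier *)
  let cnn =
    input [| 28; 28; 1 |]
    |> conv2d [| 3; 3; 1; 32 |] [| 1; 1 |] ~act_typ:Neuron.Activation.Relu
    |> max_pool2d [| 2; 2 |] [| 2; 2 |]
    |> flatten
    |> linear 10 ~act_typ:(Neuron.Activation.Softmax 1)
    |> get_network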

val avg_pool1d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node

avg_pool1d ~padding ~act_typ pool_size stride node adds an average pooling operation for temporal data to node.

Arguments: * pool_size: Array of one integer, size of the average pooling windows. * stride: Array of one integer, factor by which to downscale.

val avg_pool2d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node

avg_pool2d ~padding ~act_typ pool_size stride node adds an average pooling operation for spatial data to node.

Arguments: * pool_size: Array of 2 integers, size of the average pooling windows. * stride: Array of 2 integers, factor by which to downscale.

val global_max_pool1d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node

global_max_pool1d adds a global max pooling operation for temporal data.

val global_max_pool2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node

global_max_pool2d adds a global max pooling operation for spatial data.

val global_avg_pool1d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node

global_avg_pool1d adds a global average pooling operation for temporal data.

val global_avg_pool2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node

global_avg_pool2d adds a global average pooling operation for spatial data.

val upsampling2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> int array -> node -> node

upsampling2d ~act_typ size node adds an upsampling operation for spatial data to node.

Arguments: * size: array of two integers, namely the upsampling factors for columns and rows.

val padding2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> int array array -> node -> node

padding2d ~act_typ padding node adds rows and columns of zeros at the top, bottom, left and right side of an image tensor.

Arguments: * padding: array of 2 arrays of 2 integers, interpreted as [| [|top_pad; bottom_pad|]; [|left_pad; right_pad|] |].

val dropout : ?name:string -> float -> node -> node

dropout rate node applies Dropout to the input to prevent overfitting.

Arguments: * rate: float between 0 and 1. Fraction of the input units to drop.

val gaussian_noise : ?name:string -> float -> node -> node

gaussian_noise stddev node applies additive zero-centered Gaussian noise.

Arguments: * stddev: float, standard deviation of the noise distribution.

val gaussian_dropout : ?name:string -> float -> node -> node

gaussian_dropout rate node applies multiplicative 1-centered Gaussian noise. Only active at training time.

Arguments: * rate: float, drop probability.

val alpha_dropout : ?name:string -> float -> node -> node

alpha_dropout rate node applies Alpha Dropout to the input node. Only active at training time.

Arguments: * rate: float, drop probability.

val normalisation : ?name:string -> ?axis:int -> ?training:bool -> ?decay:float -> ?mu:Neuron.Optimise.Algodiff.A.arr -> ?var:Neuron.Optimise.Algodiff.A.arr -> node -> node

normalisation axis node normalises the activations of the previous node at each batch.

Arguments: * axis: Integer, the axis that should be normalised (typically the features axis). Default value is 0.
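A sketch of how the regularisation nodes above slot between layers; the decay value and the get_network helper are assumptions:

  let nn =
    input [| 100 |]
    |> linear 256 ~act_typ:Neuron.Activation.Relu
    |> normalisation ~decay:0.9   (* batch-normalise the activations *)
    |> dropout 0.3                (* drop 30% of units at training time *)
    |> linear 10 ~act_typ:(Neuron.Activation.Softmax 1)
    |> get_network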

val reshape : ?name:string -> int array -> node -> node

reshape target_shape node reshapes an output to a certain shape.

Arguments: * target_shape: target shape. Array of integers. Does not include the batch axis.

val flatten : ?name:string -> node -> node

flatten node flattens the input. Does not affect the batch size.

val slice : ?name:string -> int list list -> node -> node

slice node slices the input. Does not affect the batch size.

val lambda : ?name:string -> ?act_typ:Neuron.Activation.typ -> ?out_shape:int array -> (Neuron.Optimise.Algodiff.t -> Neuron.Optimise.Algodiff.t) -> node -> node

lambda ?out_shape func node wraps an arbitrary expression as a Node object.

Arguments: * func: The function to be evaluated. Takes the input tensor as its first argument. * out_shape: the shape of the tensor returned by func; set to the same as the input shape if not specified.
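A sketch of lambda: wrapping a plain Algodiff function as a node. Squaring preserves the input shape, so ?out_shape can be omitted:

  let square_node prev =
    lambda (fun x -> Neuron.Optimise.Algodiff.Maths.(x * x)) prev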

val lambda_array : ?name:string -> ?act_typ:Neuron.Activation.typ -> int array -> (Neuron.Optimise.Algodiff.t array -> Neuron.Optimise.Algodiff.t) -> node array -> node

lambda_array target_shape func node wraps an arbitrary expression as a Node object.

Arguments: * target_shape: the shape of the tensor returned by func. * func: The function to be evaluated. Takes input tensor array as first argument.

val add : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node

Node that adds a list of inputs.

It takes as input an array of nodes, all of the same shape, and returns a single node (also of the same shape).

val mul : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node

Node that multiplies (element-wise) a list of inputs.

It takes as input an array of nodes, all of the same shape, and returns a single node (also of the same shape).

val dot : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node

Node that computes a dot product between samples in two nodes.

val max : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node

Node that computes the element-wise maximum of a list of inputs.

val average : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node

Node that averages a list of inputs.

It takes as input an array of nodes, all of the same shape, and returns a single node (also of the same shape).

val concatenate : ?name:string -> ?act_typ:Neuron.Activation.typ -> int -> node array -> node

concatenate axis nodes concatenates an array of nodes and returns them as a single node.

Arguments: * axis: Axis along which to concatenate.
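A sketch of merging branches with the combinators above; get_network is an assumed helper:

  let nn =
    let x = input [| 32 |] in
    let a = x |> linear 16 ~act_typ:Neuron.Activation.Relu in
    let b = x |> linear 16 ~act_typ:Neuron.Activation.Tanh in
    (* concatenate 1 [| a; b |] would join the branches along axis 1 instead *)
    add [| a; b |]
    |> linear 4 ~act_typ:(Neuron.Activation.Softmax 1)
    |> get_network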

Helper functions
val to_string : network -> string

Convert a neural network to its string representation.

val pp_network : Stdlib.Format.formatter -> network -> unit

Pretty printing function for a neural network.

val print : network -> unit

Print the string representation of a neural network to the standard output.

val save : ?unsafe:bool -> network -> string -> unit

Serialise a network and save it to a file with the given name. Set the unsafe flag to true if the network contains a Lambda layer.

val load : string -> network

Load the neural network from a file with the given name.

val save_weights : network -> string -> unit

Save all the weights in a neural network to a file. The weights and the name of their associated neurons are saved as key-value pairs in a hash table.

val load_weights : network -> string -> unit

Load the weights from a file of the given name. Note that the weights and the name of their associated neurons are saved as key-value pairs in a hash table.
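A sketch of the persistence functions; the file names are arbitrary and nn is assumed to be an existing network value:

  let () =
    save ~unsafe:false nn "model.network";   (* whole network: topology and weights *)
    save_weights nn "model.weights";         (* weights only, keyed by neuron name *)
    let nn' = load "model.network" in
    load_weights nn' "model.weights"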

val make_subnetwork : ?copy:bool -> ?make_inputs:string array -> network -> string array -> network

make_subnetwork ?copy ?make_inputs network output_names constructs the subnetwork of nodes on which output_names depend, replacing nodes whose names appear in make_inputs with input nodes.

Arguments: * copy: Whether to copy or reference the original node weights. Defaults to true. * make_inputs: Names of nodes to use as inputs to the subnetwork. Defaults to [||], which uses the original inputs. * nn: The neural network from which the subnetwork is constructed. * output_names: Names of nodes to use as outputs.
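A sketch of make_subnetwork; the node names "conv1" and "fc2" are hypothetical:

  (* keep everything "fc2" depends on, turning "conv1" into a fresh input *)
  let extract nn =
    make_subnetwork ~copy:true ~make_inputs:[| "conv1" |] nn [| "fc2" |]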

Train Networks

Generic function for training a neural network.

Train a neural network with various configurations.

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Flatten/index.html b/owl-base/Owl_neural_generic/Flatten/index.html deleted file mode 100644 index 119677bcc..000000000 --- a/owl-base/Owl_neural_generic/Flatten/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Flatten (owl-base.Owl_neural_generic.Flatten)

Module Owl_neural_generic.Flatten

Parameters

Signature

module Graph = Graph
module Optimise = Graph.Neuron.Optimise
module Init = Graph.Neuron.Init
module Activation = Graph.Neuron.Activation
module Regularisation = Graph.Neuron.Optimise.Regularisation
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Activation/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Activation/index.html deleted file mode 100644 index 7b8438a77..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Activation/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Activation (owl-base.Owl_neural_generic.Make.Graph.Neuron.Activation)

Module Neuron.Activation

type typ = Make_Embedded(A).Neuron.Activation.typ =
  1. | Elu
  2. | Relu
  3. | Sigmoid
  4. | HardSigmoid
  5. | Softmax of int
  6. | Softplus
  7. | Softsign
  8. | Tanh
  9. | Relu6
  10. | LeakyRelu of float
  11. | TRelu of float
  12. | Custom of Optimise.Algodiff.t -> Optimise.Algodiff.t
  13. | None
type neuron_typ = Make_Embedded(A).Neuron.Activation.neuron_typ = {
  1. mutable activation : typ;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val run_activation : Optimise.Algodiff.t -> typ -> Optimise.Algodiff.t
val copy : neuron_typ -> neuron_typ
val activation_to_string : typ -> string
val to_string : neuron_typ -> string
val to_name : unit -> string
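A sketch (not part of the listing) of building a custom activation from the Algodiff maths operators and applying a built-in one through run_activation:

  let swish = Activation.Custom (fun x -> Optimise.Algodiff.Maths.(x * sigmoid x))
  let apply_relu x = Activation.run_activation x Activation.Relu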
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Add/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Add/index.html deleted file mode 100644 index ed4786fe7..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Add/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Add (owl-base.Owl_neural_generic.Make.Graph.Neuron.Add)

Module Neuron.Add

type neuron_typ = Make_Embedded(A).Neuron.Add.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/AlphaDropout/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/AlphaDropout/index.html deleted file mode 100644 index a0927ae1b..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/AlphaDropout/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -AlphaDropout (owl-base.Owl_neural_generic.Make.Graph.Neuron.AlphaDropout)

Module Neuron.AlphaDropout

type neuron_typ = Make_Embedded(A).Neuron.AlphaDropout.neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Average/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Average/index.html deleted file mode 100644 index b1be88e0c..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Average/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Average (owl-base.Owl_neural_generic.Make.Graph.Neuron.Average)

Module Neuron.Average

type neuron_typ = Make_Embedded(A).Neuron.Average.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/AvgPool1D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/AvgPool1D/index.html deleted file mode 100644 index 4371c2db3..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/AvgPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -AvgPool1D (owl-base.Owl_neural_generic.Make.Graph.Neuron.AvgPool1D)

Module Neuron.AvgPool1D

type neuron_typ = Make_Embedded(A).Neuron.AvgPool1D.neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/AvgPool2D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/AvgPool2D/index.html deleted file mode 100644 index ca7ac8f26..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/AvgPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -AvgPool2D (owl-base.Owl_neural_generic.Make.Graph.Neuron.AvgPool2D)

Module Neuron.AvgPool2D

type neuron_typ = Make_Embedded(A).Neuron.AvgPool2D.neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Concatenate/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Concatenate/index.html deleted file mode 100644 index 9f77fa4cb..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Concatenate/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Concatenate (owl-base.Owl_neural_generic.Make.Graph.Neuron.Concatenate)

Module Neuron.Concatenate

type neuron_typ = Make_Embedded(A).Neuron.Concatenate.neuron_typ = {
  1. mutable axis : int;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Conv1D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Conv1D/index.html deleted file mode 100644 index 3d76e40d2..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Conv1D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Conv1D (owl-base.Owl_neural_generic.Make.Graph.Neuron.Conv1D)

Module Neuron.Conv1D

type neuron_typ = Make_Embedded(A).Neuron.Conv1D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Conv2D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Conv2D/index.html deleted file mode 100644 index 99633dd95..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Conv2D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Conv2D (owl-base.Owl_neural_generic.Make.Graph.Neuron.Conv2D)

Module Neuron.Conv2D

type neuron_typ = Make_Embedded(A).Neuron.Conv2D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Conv3D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Conv3D/index.html deleted file mode 100644 index 4d85ad202..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Conv3D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Conv3D (owl-base.Owl_neural_generic.Make.Graph.Neuron.Conv3D)

Module Neuron.Conv3D

type neuron_typ = Make_Embedded(A).Neuron.Conv3D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/DilatedConv1D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/DilatedConv1D/index.html deleted file mode 100644 index 315c56cf0..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/DilatedConv1D/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -DilatedConv1D (owl-base.Owl_neural_generic.Make.Graph.Neuron.DilatedConv1D)

Module Neuron.DilatedConv1D

type neuron_typ = Make_Embedded(A).Neuron.DilatedConv1D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/DilatedConv2D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/DilatedConv2D/index.html deleted file mode 100644 index 9e5108dce..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/DilatedConv2D/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -DilatedConv2D (owl-base.Owl_neural_generic.Make.Graph.Neuron.DilatedConv2D)

Module Neuron.DilatedConv2D

type neuron_typ = Make_Embedded(A).Neuron.DilatedConv2D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/DilatedConv3D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/DilatedConv3D/index.html deleted file mode 100644 index 844e06fd4..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/DilatedConv3D/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -DilatedConv3D (owl-base.Owl_neural_generic.Make.Graph.Neuron.DilatedConv3D)

Module Neuron.DilatedConv3D

type neuron_typ = Make_Embedded(A).Neuron.DilatedConv3D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Dot/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Dot/index.html deleted file mode 100644 index becf5b611..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Dot/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Dot (owl-base.Owl_neural_generic.Make.Graph.Neuron.Dot)

Module Neuron.Dot

type neuron_typ = Make_Embedded(A).Neuron.Dot.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Dropout/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Dropout/index.html deleted file mode 100644 index 772455ea3..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Dropout/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Dropout (owl-base.Owl_neural_generic.Make.Graph.Neuron.Dropout)

Module Neuron.Dropout

type neuron_typ = Make_Embedded(A).Neuron.Dropout.neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Embedding/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Embedding/index.html deleted file mode 100644 index 7011f6cfd..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Embedding/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Embedding (owl-base.Owl_neural_generic.Make.Graph.Neuron.Embedding)

Module Neuron.Embedding

type neuron_typ = Make_Embedded(A).Neuron.Embedding.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_dim : int;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Flatten/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Flatten/index.html deleted file mode 100644 index b12f2eb9f..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Flatten/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Flatten (owl-base.Owl_neural_generic.Make.Graph.Neuron.Flatten)

Module Neuron.Flatten

type neuron_typ = Make_Embedded(A).Neuron.Flatten.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/FullyConnected/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/FullyConnected/index.html deleted file mode 100644 index ccc48609a..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/FullyConnected/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -FullyConnected (owl-base.Owl_neural_generic.Make.Graph.Neuron.FullyConnected)

Module Neuron.FullyConnected

type neuron_typ = Make_Embedded(A).Neuron.FullyConnected.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/GRU/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/GRU/index.html deleted file mode 100644 index ae0b7de82..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/GRU/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GRU (owl-base.Owl_neural_generic.Make.Graph.Neuron.GRU)

Module Neuron.GRU

type neuron_typ = Make_Embedded(A).Neuron.GRU.neuron_typ = {
  1. mutable wxz : Optimise.Algodiff.t;
  2. mutable whz : Optimise.Algodiff.t;
  3. mutable wxr : Optimise.Algodiff.t;
  4. mutable whr : Optimise.Algodiff.t;
  5. mutable wxh : Optimise.Algodiff.t;
  6. mutable whh : Optimise.Algodiff.t;
  7. mutable bz : Optimise.Algodiff.t;
  8. mutable br : Optimise.Algodiff.t;
  9. mutable bh : Optimise.Algodiff.t;
  10. mutable h : Optimise.Algodiff.t;
  11. mutable init_typ : Init.typ;
  12. mutable in_shape : int array;
  13. mutable out_shape : int array;
}
val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/GaussianDropout/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/GaussianDropout/index.html deleted file mode 100644 index a95651516..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/GaussianDropout/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GaussianDropout (owl-base.Owl_neural_generic.Make.Graph.Neuron.GaussianDropout)

Module Neuron.GaussianDropout

type neuron_typ = Make_Embedded(A).Neuron.GaussianDropout.neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/GaussianNoise/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/GaussianNoise/index.html deleted file mode 100644 index 6ed0c1712..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/GaussianNoise/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GaussianNoise (owl-base.Owl_neural_generic.Make.Graph.Neuron.GaussianNoise)

Module Neuron.GaussianNoise

type neuron_typ = Make_Embedded(A).Neuron.GaussianNoise.neuron_typ = {
  1. mutable sigma : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/GlobalAvgPool1D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/GlobalAvgPool1D/index.html deleted file mode 100644 index 5645e627c..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/GlobalAvgPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalAvgPool1D (owl-base.Owl_neural_generic.Make.Graph.Neuron.GlobalAvgPool1D)

Module Neuron.GlobalAvgPool1D

type neuron_typ = Make_Embedded(A).Neuron.GlobalAvgPool1D.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/GlobalAvgPool2D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/GlobalAvgPool2D/index.html deleted file mode 100644 index 4a437669e..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/GlobalAvgPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalAvgPool2D (owl-base.Owl_neural_generic.Make.Graph.Neuron.GlobalAvgPool2D)

Module Neuron.GlobalAvgPool2D

type neuron_typ = Make_Embedded(A).Neuron.GlobalAvgPool2D.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/GlobalMaxPool1D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/GlobalMaxPool1D/index.html deleted file mode 100644 index 15265e6b4..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/GlobalMaxPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalMaxPool1D (owl-base.Owl_neural_generic.Make.Graph.Neuron.GlobalMaxPool1D)

Module Neuron.GlobalMaxPool1D

type neuron_typ = Make_Embedded(A).Neuron.GlobalMaxPool1D.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/GlobalMaxPool2D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/GlobalMaxPool2D/index.html deleted file mode 100644 index 64936f970..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/GlobalMaxPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalMaxPool2D (owl-base.Owl_neural_generic.Make.Graph.Neuron.GlobalMaxPool2D)

Module Neuron.GlobalMaxPool2D

type neuron_typ = Make_Embedded(A).Neuron.GlobalMaxPool2D.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Init/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Init/index.html deleted file mode 100644 index eb284a5eb..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Init/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Init (owl-base.Owl_neural_generic.Make.Graph.Neuron.Init)

Module Neuron.Init

type typ = Make_Embedded(A).Neuron.Init.typ =
  1. | Uniform of float * float
  2. | Gaussian of float * float
  3. | Standard
  4. | Tanh
  5. | GlorotNormal
  6. | GlorotUniform
  7. | LecunNormal
  8. | HeNormal
  9. | Custom of int array -> Optimise.Algodiff.t
val calc_fans : int array -> float * float
val run : typ -> int array -> Optimise.Algodiff.t -> Optimise.Algodiff.t
val to_string : typ -> string
val to_name : unit -> string
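A sketch of passing an initialiser to a neuron constructor; Linear.create is the constructor listed further below in this patch:

  let l1 = Linear.create ~inputs:784 128 Init.(Gaussian (0., 0.05))
  let l2 = Linear.create 64 Init.GlorotUniform   (* input size inferred at connect *)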
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Input/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Input/index.html deleted file mode 100644 index ea826f586..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Input/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Input (owl-base.Owl_neural_generic.Make.Graph.Neuron.Input)

Module Neuron.Input

type neuron_typ = Make_Embedded(A).Neuron.Input.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : int array -> neuron_typ
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/LSTM/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/LSTM/index.html deleted file mode 100644 index ff1d11140..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/LSTM/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -LSTM (owl-base.Owl_neural_generic.Make.Graph.Neuron.LSTM)

Module Neuron.LSTM

type neuron_typ = Make_Embedded(A).Neuron.LSTM.neuron_typ = {
  1. mutable wxi : Optimise.Algodiff.t;
  2. mutable whi : Optimise.Algodiff.t;
  3. mutable wxc : Optimise.Algodiff.t;
  4. mutable whc : Optimise.Algodiff.t;
  5. mutable wxf : Optimise.Algodiff.t;
  6. mutable whf : Optimise.Algodiff.t;
  7. mutable wxo : Optimise.Algodiff.t;
  8. mutable who : Optimise.Algodiff.t;
  9. mutable bi : Optimise.Algodiff.t;
  10. mutable bc : Optimise.Algodiff.t;
  11. mutable bf : Optimise.Algodiff.t;
  12. mutable bo : Optimise.Algodiff.t;
  13. mutable c : Optimise.Algodiff.t;
  14. mutable h : Optimise.Algodiff.t;
  15. mutable init_typ : Init.typ;
  16. mutable in_shape : int array;
  17. mutable out_shape : int array;
}
val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Lambda/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Lambda/index.html deleted file mode 100644 index 596bb9b62..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Lambda/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Lambda (owl-base.Owl_neural_generic.Make.Graph.Neuron.Lambda)

Module Neuron.Lambda

type neuron_typ = Make_Embedded(A).Neuron.Lambda.neuron_typ = {
  1. mutable lambda : Optimise.Algodiff.t -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : ?out_shape:int array -> (Optimise.Algodiff.t -> Optimise.Algodiff.t) -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/LambdaArray/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/LambdaArray/index.html deleted file mode 100644 index 8b17d5cd7..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/LambdaArray/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -LambdaArray (owl-base.Owl_neural_generic.Make.Graph.Neuron.LambdaArray)

Module Neuron.LambdaArray

type neuron_typ = Make_Embedded(A).Neuron.LambdaArray.neuron_typ = {
  1. mutable lambda : Optimise.Algodiff.t array -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int array -> (Optimise.Algodiff.t array -> Optimise.Algodiff.t) -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Linear/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Linear/index.html deleted file mode 100644 index 3d2fd4105..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Linear/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Linear (owl-base.Owl_neural_generic.Make.Graph.Neuron.Linear)

Module Neuron.Linear

type neuron_typ = Make_Embedded(A).Neuron.Linear.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/LinearNoBias/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/LinearNoBias/index.html deleted file mode 100644 index af15bce2e..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/LinearNoBias/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -LinearNoBias (owl-base.Owl_neural_generic.Make.Graph.Neuron.LinearNoBias)

Module Neuron.LinearNoBias

type neuron_typ = Make_Embedded(A).Neuron.LinearNoBias.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_shape : int array;
  4. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Masking/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Masking/index.html deleted file mode 100644 index 0c7aca218..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Masking/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Masking (owl-base.Owl_neural_generic.Make.Graph.Neuron.Masking)

Module Neuron.Masking

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Max/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Max/index.html deleted file mode 100644 index 2dd3b898b..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Max/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Max (owl-base.Owl_neural_generic.Make.Graph.Neuron.Max)

Module Neuron.Max

type neuron_typ = Make_Embedded(A).Neuron.Max.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/MaxPool1D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/MaxPool1D/index.html deleted file mode 100644 index ca8b3bbd4..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/MaxPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -MaxPool1D (owl-base.Owl_neural_generic.Make.Graph.Neuron.MaxPool1D)

Module Neuron.MaxPool1D

type neuron_typ = Make_Embedded(A).Neuron.MaxPool1D.neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/MaxPool2D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/MaxPool2D/index.html deleted file mode 100644 index 04a65ee3b..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/MaxPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -MaxPool2D (owl-base.Owl_neural_generic.Make.Graph.Neuron.MaxPool2D)

Module Neuron.MaxPool2D

type neuron_typ = Make_Embedded(A).Neuron.MaxPool2D.neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Mul/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Mul/index.html deleted file mode 100644 index d90ce5ce7..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Mul/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mul (owl-base.Owl_neural_generic.Make.Graph.Neuron.Mul)

Module Neuron.Mul

type neuron_typ = Make_Embedded(A).Neuron.Mul.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Normalisation/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Normalisation/index.html deleted file mode 100644 index 470e83dfe..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Normalisation/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Normalisation (owl-base.Owl_neural_generic.Make.Graph.Neuron.Normalisation)

Module Neuron.Normalisation

type neuron_typ = Make_Embedded(A).Neuron.Normalisation.neuron_typ = {
  1. mutable axis : int;
  2. mutable beta : Optimise.Algodiff.t;
  3. mutable gamma : Optimise.Algodiff.t;
  4. mutable mu : Optimise.Algodiff.t;
  5. mutable var : Optimise.Algodiff.t;
  6. mutable decay : Optimise.Algodiff.t;
  7. mutable training : bool;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : ?training:bool -> ?decay:float -> ?mu:Optimise.Algodiff.A.arr -> ?var:Optimise.Algodiff.A.arr -> int -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val load_weights : neuron_typ -> Optimise.Algodiff.t array -> unit
val save_weights : neuron_typ -> Optimise.Algodiff.t array
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/A/Linalg/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/A/Linalg/index.html deleted file mode 100644 index 5016d63c2..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/A/Mat/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/A/Mat/index.html deleted file mode 100644 index 6fc6e1dec..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/A/Scalar/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/A/Scalar/index.html deleted file mode 100644 index 5727d38b1..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/A/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/A/index.html deleted file mode 100644 index 2779035d1..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Algodiff.A)

Module Algodiff.A

val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
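The functions above operate on plain arr and elt values, i.e. the underlying ndarray module that the Algodiff functor was applied to. A minimal sketch of how they compose; the alias A for an instance of this module, and the default parameters of gaussian and uniform, are assumptions:
module A = Algodiff.A                          (* hedged alias for the module documented above *)
let x = A.gaussian [| 3; 4 |]                  (* 3 x 4 ndarray *)
let y = A.uniform [| 4; 2 |]
let s = A.elt_to_float (A.sum' (A.dot x y))    (* matrix product, folded to an OCaml float *)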
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Arr/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Arr/index.html deleted file mode 100644 index 252479483..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/index.html deleted file mode 100644 index 140bc7d19..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Algodiff.Builder)

Module Algodiff.Builder

module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t
module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t
module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t
module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array
module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t
module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html deleted file mode 100644 index 249acb783..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html deleted file mode 100644 index b0a6638cf..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html deleted file mode 100644 index 22e250b10..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html deleted file mode 100644 index bde567169..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html deleted file mode 100644 index 7c7d28a7b..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
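Siso describes a single-input, single-output primitive; together with build_siso above it defines a new differentiable operator. A hedged sketch of a cube operator follows: the alias AD for an instance of the enclosing Algodiff module, and the conventional meaning of the df and dr arguments (output primal, input primal, tangent or adjoint reference), are assumptions not confirmed by this listing.
module AD = Algodiff    (* hedged: stands for any instantiation of the enclosing module *)
let cube =
  AD.Builder.build_siso
    (module struct
      let label = "cube"
      let ff_f x = AD.pack_elt AD.A.Scalar.(mul x (mul x x))     (* forward pass on a scalar *)
      let ff_arr x = AD.pack_arr AD.A.(mul x (mul x x))          (* forward pass on an ndarray *)
      let df _cp ap at = AD.Maths.(at * AD._f 3. * ap * ap)      (* tangent rule: 3 x^2 . dx (assumed convention) *)
      let dr ap _cp ca = AD.Maths.(!ca * AD._f 3. * ap * ap)     (* adjoint rule: 3 x^2 . ybar (assumed convention) *)
    end : AD.Builder.Siso)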
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html deleted file mode 100644 index ff0477539..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Linalg/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Linalg/index.html deleted file mode 100644 index f24afee8f..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t
val logdet : t -> t
val chol : ?upper:bool -> t -> t
val qr : t -> t * t
val lq : t -> t * t
val svd : ?thin:bool -> t -> t * t * t
val sylvester : t -> t -> t -> t
val lyapunov : t -> t -> t
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> t -> t -> t
val (/@) : t -> t -> t
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> t -> t -> t
val care : ?diag_r:bool -> t -> t -> t -> t -> t
val dare : ?diag_r:bool -> t -> t -> t -> t -> t
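These routines take and return t values, so they stay on the differentiation tape. A minimal sketch, assuming the module path of an instantiated Algodiff and that x is a square, positive-definite matrix wrapped as t; grad is from the main Algodiff listing further down:
let f x = Algodiff.Linalg.logdet x      (* scalar-valued t *)
let dlogdet = Algodiff.grad f           (* gradient with respect to the matrix entries *)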
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Mat/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Mat/index.html deleted file mode 100644 index c54e72778..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
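Unlike A.Mat, these helpers build and manipulate matrices directly as t values, so their results feed straight into Maths and the differentiation operators. A small sketch, with the module path assumed:
let w = Algodiff.Mat.uniform 3 3                  (* 3 x 3 matrix wrapped as t *)
let p = Algodiff.Mat.dot w (Algodiff.Mat.eye 3)
let tr = Algodiff.Maths.trace p                   (* still a t, hence differentiable *)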
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Maths/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Maths/index.html deleted file mode 100644 index 69d565f89..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t
val (-) : t -> t -> t
val (*) : t -> t -> t
val (/) : t -> t -> t
val (*@) : t -> t -> t
val (**) : t -> t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val kron : t -> t -> t
val dot : t -> t -> t
val pow : t -> t -> t
val atan2 : t -> t -> t
val min2 : t -> t -> t
val max2 : t -> t -> t
val cross_entropy : t -> t -> t
val inv : t -> t
val neg : t -> t
val abs : t -> t
val signum : t -> t
val floor : t -> t
val ceil : t -> t
val round : t -> t
val sqr : t -> t
val sqrt : t -> t
val log : t -> t
val log2 : t -> t
val log10 : t -> t
val exp : t -> t
val sin : t -> t
val cos : t -> t
val tan : t -> t
val sinh : t -> t
val cosh : t -> t
val tanh : t -> t
val asin : t -> t
val acos : t -> t
val atan : t -> t
val asinh : t -> t
val acosh : t -> t
val atanh : t -> t
val sum' : t -> t
val log_sum_exp' : t -> t
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t
val sum : ?axis:int -> ?keep_dims:bool -> t -> t
val sum_reduce : ?axis:int array -> t -> t
val mean : t -> t
val transpose : ?axis:int array -> t -> t
val swap : int -> int -> t -> t
val l1norm' : t -> t
val l2norm' : t -> t
val l2norm_sqr' : t -> t
val sigmoid : t -> t
val relu : t -> t
val dawsn : t -> t
val softplus : t -> t
val softsign : t -> t
val softmax : ?axis:int -> t -> t
val reshape : t -> int array -> t
val flatten : t -> t
val get_item : t -> int -> int -> t
val get_row : t -> int -> t
val concat : axis:int -> t -> t -> t
val split : axis:int -> int array -> t -> t array
val of_arrays : t array array -> t
val to_arrays : t -> t array array
val concatenate : axis:int -> t array -> t
val stack : axis:int -> t array -> t
val get_slice : int list list -> t -> t
val set_slice : int list list -> t -> t -> t
val get_fancy : Owl_types.index list -> t -> t
val set_fancy : Owl_types.index list -> t -> t -> t
val diag : ?k:int -> t -> t
val diagm : ?k:int -> t -> t
val trace : t -> t
val triu : ?k:int -> t -> t
val tril : ?k:int -> t -> t
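All of these operators close over t, so ordinary-looking formulas remain differentiable end to end. A hedged sketch of a least-squares loss using the operators above, where *@ is the matrix product and l2norm_sqr' reduces to a scalar t; the surrounding module path is assumed:
let loss w x y = Algodiff.Maths.(l2norm_sqr' ((x *@ w) - y))
(* its gradient in w: Algodiff.grad (fun w -> loss w x y), using grad from the main listing *)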
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/NN/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/NN/index.html deleted file mode 100644 index fd7e18738..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t
val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val upsampling2d : t -> int array -> t
val pad : ?v:A.elt -> int list list -> t -> t
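A hedged sketch of the NN-level helpers above: a 2-d convolution followed by max-pooling. The SAME constructor of Owl_types.padding and the [|h; w; in_channels; out_channels|] kernel layout are assumptions, not stated in this listing:
let forward x w =
  let y = Algodiff.NN.conv2d ~padding:Owl_types.SAME x w [| 1; 1 |] in   (* stride 1 in both dims *)
  Algodiff.NN.max_pool2d Owl_types.SAME y [| 2; 2 |] [| 2; 2 |]          (* 2x2 window, stride 2 *)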
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/index.html deleted file mode 100644 index 8bda7b926..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Algodiff/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Algodiff (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Algodiff)

Module Optimise.Algodiff

module A : sig ... end
type t = Make_Embedded(A).Neuron.Optimise.Algodiff.t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
val tag : unit -> int
val primal : t -> t
val primal' : t -> t
val zero : t -> t
val reset_zero : t -> t
val tangent : t -> t
val adjref : t -> t Stdlib.ref
val adjval : t -> t
val shape : t -> int array
val is_float : t -> bool
val is_arr : t -> bool
val row_num : t -> int
val col_num : t -> int
val numel : t -> int
val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t
val clip_by_l2norm : A.elt -> t -> t
val copy_primal' : t -> t
val tile : t -> int array -> t
val repeat : t -> int array -> t
val pack_elt : A.elt -> t
val unpack_elt : t -> A.elt
val pack_flt : float -> t
val _f : float -> t
val unpack_flt : t -> float
val pack_arr : A.arr -> t
val unpack_arr : t -> A.arr
val deep_info : t -> string
val type_info : t -> string
val error_binop : string -> t -> t -> 'a
val error_uniop : string -> t -> 'a
val make_forward : t -> t -> int -> t
val make_reverse : t -> int -> t
val reverse_prop : t -> t -> unit
val diff : (t -> t) -> t -> t
val diff' : (t -> t) -> t -> t * t
val grad : (t -> t) -> t -> t
val grad' : (t -> t) -> t -> t * t
val jacobian : (t -> t) -> t -> t
val jacobian' : (t -> t) -> t -> t * t
val jacobianv : (t -> t) -> t -> t -> t
val jacobianv' : (t -> t) -> t -> t -> t * t
val jacobianTv : (t -> t) -> t -> t -> t
val jacobianTv' : (t -> t) -> t -> t -> t * t
val hessian : (t -> t) -> t -> t
val hessian' : (t -> t) -> t -> t * t
val hessianv : (t -> t) -> t -> t -> t
val hessianv' : (t -> t) -> t -> t -> t * t
val laplacian : (t -> t) -> t -> t
val laplacian' : (t -> t) -> t -> t * t
val gradhessian : (t -> t) -> t -> t * t
val gradhessian' : (t -> t) -> t -> t * t * t
val gradhessianv : (t -> t) -> t -> t -> t * t
val gradhessianv' : (t -> t) -> t -> t -> t * t * t
module Builder : sig ... end
module Maths : sig ... end
module Linalg : sig ... end
module NN : sig ... end
module Mat : sig ... end
module Arr : sig ... end
val to_trace : t list -> string
val to_dot : t list -> string
val pp_num : Stdlib.Format.formatter -> t -> unit
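Putting the pieces together: diff and grad above turn an ordinary function over t into its derivative. A minimal sketch, with the alias AD for an instance of this module assumed:
module AD = Algodiff
let f x = AD.Maths.(sin x * exp (neg (sqr x)))   (* scalar function of one t *)
let f' = AD.diff f                               (* its derivative, also t -> t *)
let y = AD.unpack_flt (f' (AD._f 0.5))           (* evaluate at 0.5 and unpack to a float *)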
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Batch/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Batch/index.html deleted file mode 100644 index c5ea7e8a6..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Batch/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Batch (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Batch)

Module Optimise.Batch

type typ = Make_Embedded(A).Neuron.Optimise.Batch.typ =
  1. | Full
  2. | Mini of int
  3. | Sample of int
  4. | Stochastic
val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val batches : typ -> Algodiff.t -> int
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Checkpoint/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Checkpoint/index.html deleted file mode 100644 index df78227b5..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Checkpoint/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Checkpoint (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Checkpoint)

Module Optimise.Checkpoint

type state = Make_Embedded(A).Neuron.Optimise.Checkpoint.state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}
type typ = Make_Embedded(A).Neuron.Optimise.Checkpoint.typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of state -> unit
  4. | None
val init_state : int -> float -> state
val default_checkpoint_fun : (string -> 'a) -> 'a
val print_state_info : state -> unit
val print_summary : state -> unit
val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Clipping/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Clipping/index.html deleted file mode 100644 index cc0784df2..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Clipping/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Clipping (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Clipping)

Module Optimise.Clipping

type typ = Make_Embedded(A).Neuron.Optimise.Clipping.typ =
  1. | L2norm of float
  2. | Value of float * float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Gradient/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Gradient/index.html deleted file mode 100644 index e74dd268c..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Gradient/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -Gradient (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Gradient)

Module Optimise.Gradient

type typ = Make_Embedded(A).Neuron.Optimise.Gradient.typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton
val run : typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Learning_Rate/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Learning_Rate/index.html deleted file mode 100644 index 38f7fbbbd..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Learning_Rate/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Learning_Rate (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Learning_Rate)

Module Optimise.Learning_Rate

type typ = Make_Embedded(A).Neuron.Optimise.Learning_Rate.typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t
val default : typ -> typ
val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Loss/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Loss/index.html deleted file mode 100644 index 86fd5e272..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Loss/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Loss (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Loss)

Module Optimise.Loss

type typ = Make_Embedded(A).Neuron.Optimise.Loss.typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of Algodiff.t -> Algodiff.t -> Algodiff.t
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Momentum/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Momentum/index.html deleted file mode 100644 index ae4712c10..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Momentum/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Momentum (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Momentum)

Module Optimise.Momentum

type typ = Make_Embedded(A).Neuron.Optimise.Momentum.typ =
  1. | Standard of float
  2. | Nesterov of float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Params/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Params/index.html deleted file mode 100644 index 733049cd8..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Params/index.html +++ /dev/null @@ -1,14 +0,0 @@ - -Params (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Params)

Module Optimise.Params

type typ = Make_Embedded(A).Neuron.Optimise.Params.typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}
val default : unit -> typ
val config : ?batch:Batch.typ -> ?gradient:Gradient.typ -> ?loss:Loss.typ -> ?learning_rate:Learning_Rate.typ -> ?regularisation:Regularisation.typ -> ?momentum:Momentum.typ -> ?clipping:Clipping.typ -> ?stopping:Stopping.typ -> ?checkpoint:Checkpoint.typ -> ?verbosity:bool -> float -> typ
val to_string : typ -> string
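config assembles a typ record from the option types in the sibling modules; the trailing float corresponds to the epochs field above. A hedged sketch, assuming the code runs where Optimise's submodules are in scope:
let params =
  Params.config
    ~batch:(Batch.Mini 128)
    ~loss:Loss.Cross_entropy
    ~learning_rate:(Learning_Rate.Adam (0.001, 0.9, 0.999))
    ~stopping:(Stopping.Const 1e-6)
    10.                                  (* epochs *)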
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Regularisation/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Regularisation/index.html deleted file mode 100644 index a67c516a9..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Regularisation/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Regularisation (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Regularisation)

Module Optimise.Regularisation

type typ = Make_Embedded(A).Neuron.Optimise.Regularisation.typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None
val run : typ -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Stopping/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Stopping/index.html deleted file mode 100644 index c219f1bb7..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Stopping/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Stopping (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Stopping)

Module Optimise.Stopping

type typ = Make_Embedded(A).Neuron.Optimise.Stopping.typ =
  1. | Const of float
  2. | Early of int * int
  3. | None
val run : typ -> float -> bool
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Utils/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Utils/index.html deleted file mode 100644 index 0e214a85d..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise.Utils)

Module Optimise.Utils

val sample_num : Algodiff.t -> int
val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val get_chunk : Algodiff.t -> Algodiff.t -> int -> int -> Algodiff.t * Algodiff.t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/index.html deleted file mode 100644 index 9b1ce3b1e..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Optimise/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -Optimise (owl-base.Owl_neural_generic.Make.Graph.Neuron.Optimise)

Module Neuron.Optimise

module Algodiff : sig ... end
module Utils : sig ... end
module Learning_Rate : sig ... end
module Batch : sig ... end
module Loss : sig ... end
module Gradient : sig ... end
module Momentum : sig ... end
module Regularisation : sig ... end
module Clipping : sig ... end
module Stopping : sig ... end
module Checkpoint : sig ... end
module Params : sig ... end
val minimise_weight : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> (Algodiff.t array array -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
val minimise_fun : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_compiled_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> (unit -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
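minimise_fun drives the whole optimisation loop for a plain scalar function. A hedged sketch, written as if inside an instantiated Optimise module (an assumption), and assuming the returned pair is the final checkpoint state together with the minimising argument:
let f x = Algodiff.Maths.(sqr x + sin x)                               (* scalar objective *)
let state, x_min = minimise_fun (Params.default ()) f (Algodiff._f 3.) (* start the search at 3.0 *)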
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Padding1D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Padding1D/index.html deleted file mode 100644 index 054a3e60f..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Padding1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding1D (owl-base.Owl_neural_generic.Make.Graph.Neuron.Padding1D)

Module Neuron.Padding1D

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Padding2D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Padding2D/index.html deleted file mode 100644 index 4f2a285f3..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Padding2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding2D (owl-base.Owl_neural_generic.Make.Graph.Neuron.Padding2D)

Module Neuron.Padding2D

type neuron_typ = Make_Embedded(A).Neuron.Padding2D.neuron_typ = {
  1. mutable padding : int array array;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int array array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Padding3D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Padding3D/index.html deleted file mode 100644 index 18668982b..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Padding3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding3D (owl-base.Owl_neural_generic.Make.Graph.Neuron.Padding3D)

Module Neuron.Padding3D

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Recurrent/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Recurrent/index.html deleted file mode 100644 index 95f068ad3..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Recurrent/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -Recurrent (owl-base.Owl_neural_generic.Make.Graph.Neuron.Recurrent)

Module Neuron.Recurrent

type neuron_typ = Make_Embedded(A).Neuron.Recurrent.neuron_typ = {
  1. mutable whh : Optimise.Algodiff.t;
  2. mutable wxh : Optimise.Algodiff.t;
  3. mutable why : Optimise.Algodiff.t;
  4. mutable bh : Optimise.Algodiff.t;
  5. mutable by : Optimise.Algodiff.t;
  6. mutable h : Optimise.Algodiff.t;
  7. mutable hiddens : int;
  8. mutable act : Activation.typ;
  9. mutable init_typ : Init.typ;
  10. mutable in_shape : int array;
  11. mutable out_shape : int array;
}
val create : ?time_steps:int -> ?inputs:int -> int -> int -> Activation.typ -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Reshape/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Reshape/index.html deleted file mode 100644 index 81a0d866c..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Reshape/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Reshape (owl-base.Owl_neural_generic.Make.Graph.Neuron.Reshape)

Module Neuron.Reshape

type neuron_typ = Make_Embedded(A).Neuron.Reshape.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : ?inputs:int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Slice/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/Slice/index.html deleted file mode 100644 index 80d2df3ef..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/Slice/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Slice (owl-base.Owl_neural_generic.Make.Graph.Neuron.Slice)

Module Neuron.Slice

type neuron_typ = Make_Embedded(A).Neuron.Slice.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
  3. mutable slice : int list list;
}
val create : int list list -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/TransposeConv1D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/TransposeConv1D/index.html deleted file mode 100644 index 14766ef7b..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/TransposeConv1D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -TransposeConv1D (owl-base.Owl_neural_generic.Make.Graph.Neuron.TransposeConv1D)

Module Neuron.TransposeConv1D

type neuron_typ = Make_Embedded(A).Neuron.TransposeConv1D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/TransposeConv2D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/TransposeConv2D/index.html deleted file mode 100644 index ee8f08baf..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/TransposeConv2D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -TransposeConv2D (owl-base.Owl_neural_generic.Make.Graph.Neuron.TransposeConv2D)

Module Neuron.TransposeConv2D

type neuron_typ = Make_Embedded(A).Neuron.TransposeConv2D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/TransposeConv3D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/TransposeConv3D/index.html deleted file mode 100644 index 399845fa6..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/TransposeConv3D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -TransposeConv3D (owl-base.Owl_neural_generic.Make.Graph.Neuron.TransposeConv3D)

Module Neuron.TransposeConv3D

type neuron_typ = Make_Embedded(A).Neuron.TransposeConv3D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/UpSampling1D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/UpSampling1D/index.html deleted file mode 100644 index 3be484628..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/UpSampling1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling1D (owl-base.Owl_neural_generic.Make.Graph.Neuron.UpSampling1D)

Module Neuron.UpSampling1D

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/UpSampling2D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/UpSampling2D/index.html deleted file mode 100644 index 87ac603f4..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/UpSampling2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling2D (owl-base.Owl_neural_generic.Make.Graph.Neuron.UpSampling2D)

Module Neuron.UpSampling2D

type neuron_typ = Make_Embedded(A).Neuron.UpSampling2D.neuron_typ = {
  1. mutable size : int array;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/UpSampling3D/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/UpSampling3D/index.html deleted file mode 100644 index 4a5b0123a..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/UpSampling3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling3D (owl-base.Owl_neural_generic.Make.Graph.Neuron.UpSampling3D)

Module Neuron.UpSampling3D

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/Neuron/index.html b/owl-base/Owl_neural_generic/Make/Graph/Neuron/index.html deleted file mode 100644 index d1fdf1f88..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/Neuron/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Neuron (owl-base.Owl_neural_generic.Make.Graph.Neuron)

Module Graph.Neuron

module Optimise : sig ... end
module Init : sig ... end
module Input : sig ... end
module Activation : sig ... end
module Linear : sig ... end
module LinearNoBias : sig ... end
module Recurrent : sig ... end
module LSTM : sig ... end
module GRU : sig ... end
module Conv1D : sig ... end
module Conv2D : sig ... end
module Conv3D : sig ... end
module DilatedConv1D : sig ... end
module DilatedConv2D : sig ... end
module DilatedConv3D : sig ... end
module TransposeConv1D : sig ... end
module TransposeConv2D : sig ... end
module TransposeConv3D : sig ... end
module FullyConnected : sig ... end
module MaxPool1D : sig ... end
module MaxPool2D : sig ... end
module AvgPool1D : sig ... end
module AvgPool2D : sig ... end
module GlobalMaxPool1D : sig ... end
module GlobalMaxPool2D : sig ... end
module GlobalAvgPool1D : sig ... end
module GlobalAvgPool2D : sig ... end
module UpSampling1D : sig ... end
module UpSampling2D : sig ... end
module UpSampling3D : sig ... end
module Padding1D : sig ... end
module Padding2D : sig ... end
module Padding3D : sig ... end
module Lambda : sig ... end
module LambdaArray : sig ... end
module Dropout : sig ... end
module Reshape : sig ... end
module Flatten : sig ... end
module Slice : sig ... end
module Add : sig ... end
module Mul : sig ... end
module Dot : sig ... end
module Max : sig ... end
module Average : sig ... end
module Concatenate : sig ... end
module Normalisation : sig ... end
module GaussianNoise : sig ... end
module GaussianDropout : sig ... end
module AlphaDropout : sig ... end
module Embedding : sig ... end
module Masking : sig ... end
type neuron = Make_Embedded(A).Neuron.neuron =
  1. | Input of Input.neuron_typ
  2. | Linear of Linear.neuron_typ
  3. | LinearNoBias of LinearNoBias.neuron_typ
  4. | Embedding of Embedding.neuron_typ
  5. | LSTM of LSTM.neuron_typ
  6. | GRU of GRU.neuron_typ
  7. | Recurrent of Recurrent.neuron_typ
  8. | Conv1D of Conv1D.neuron_typ
  9. | Conv2D of Conv2D.neuron_typ
  10. | Conv3D of Conv3D.neuron_typ
  11. | DilatedConv1D of DilatedConv1D.neuron_typ
  12. | DilatedConv2D of DilatedConv2D.neuron_typ
  13. | DilatedConv3D of DilatedConv3D.neuron_typ
  14. | TransposeConv1D of TransposeConv1D.neuron_typ
  15. | TransposeConv2D of TransposeConv2D.neuron_typ
  16. | TransposeConv3D of TransposeConv3D.neuron_typ
  17. | FullyConnected of FullyConnected.neuron_typ
  18. | MaxPool1D of MaxPool1D.neuron_typ
  19. | MaxPool2D of MaxPool2D.neuron_typ
  20. | AvgPool1D of AvgPool1D.neuron_typ
  21. | AvgPool2D of AvgPool2D.neuron_typ
  22. | GlobalMaxPool1D of GlobalMaxPool1D.neuron_typ
  23. | GlobalMaxPool2D of GlobalMaxPool2D.neuron_typ
  24. | GlobalAvgPool1D of GlobalAvgPool1D.neuron_typ
  25. | GlobalAvgPool2D of GlobalAvgPool2D.neuron_typ
  26. | UpSampling2D of UpSampling2D.neuron_typ
  27. | Padding2D of Padding2D.neuron_typ
  28. | Dropout of Dropout.neuron_typ
  29. | Reshape of Reshape.neuron_typ
  30. | Flatten of Flatten.neuron_typ
  31. | Slice of Slice.neuron_typ
  32. | Lambda of Lambda.neuron_typ
  33. | LambdaArray of LambdaArray.neuron_typ
  34. | Activation of Activation.neuron_typ
  35. | GaussianNoise of GaussianNoise.neuron_typ
  36. | GaussianDropout of GaussianDropout.neuron_typ
  37. | AlphaDropout of AlphaDropout.neuron_typ
  38. | Normalisation of Normalisation.neuron_typ
  39. | Add of Add.neuron_typ
  40. | Mul of Mul.neuron_typ
  41. | Dot of Dot.neuron_typ
  42. | Max of Max.neuron_typ
  43. | Average of Average.neuron_typ
  44. | Concatenate of Concatenate.neuron_typ
val get_in_out_shape : neuron -> int array * int array
val get_in_shape : neuron -> int array
val get_out_shape : neuron -> int array
val connect : int array array -> neuron -> unit
val init : neuron -> unit
val reset : neuron -> unit
val mktag : int -> neuron -> unit
val mkpar : neuron -> Optimise.Algodiff.t array
val mkpri : neuron -> Optimise.Algodiff.t array
val mkadj : neuron -> Optimise.Algodiff.t array
val update : neuron -> Optimise.Algodiff.t array -> unit
val load_weights : neuron -> Optimise.Algodiff.t array -> unit
val save_weights : neuron -> Optimise.Algodiff.t array
val copy : neuron -> neuron
val to_string : neuron -> string
val to_name : neuron -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/Graph/index.html b/owl-base/Owl_neural_generic/Make/Graph/index.html deleted file mode 100644 index 16d5f28ab..000000000 --- a/owl-base/Owl_neural_generic/Make/Graph/index.html +++ /dev/null @@ -1,243 +0,0 @@ - -Graph (owl-base.Owl_neural_generic.Make.Graph)

Module Make.Graph

module Neuron : sig ... end
type node = Make_Embedded(A).node = {
  1. mutable name : string;
  2. mutable prev : node array;
  3. mutable next : node array;
  4. mutable neuron : Neuron.neuron;
  5. mutable output : Neuron.Optimise.Algodiff.t option;
  6. mutable network : network;
  7. mutable train : bool;
}
and network = Make_Embedded(A).network = {
  1. mutable nnid : string;
  2. mutable size : int;
  3. mutable roots : node array;
  4. mutable outputs : node array;
  5. mutable topo : node array;
}
val make_network : ?nnid:string -> int -> node array -> node array -> network
val make_node : ?name:string -> ?train:bool -> node array -> node array -> Neuron.neuron -> Neuron.Optimise.Algodiff.t option -> network -> node
val get_roots : network -> node array
val get_outputs : network -> node array
val get_node : network -> string -> node
val get_network : ?name:string -> node -> network
val outputs : ?name:string -> node array -> network
val get_network_name : network -> string
val set_network_name : network -> string -> unit
val collect_output : node array -> Neuron.Optimise.Algodiff.t array
val connect_pair : node -> node -> unit
val connect_to_parents : node array -> node -> unit
val add_node : ?act_typ:Neuron.Activation.typ -> network -> node array -> node -> node
val input_shape : network -> int array
val input_shapes : network -> int array array
val init : network -> unit
val reset : network -> unit
val mktag : int -> network -> unit
val mkpar : network -> Neuron.Optimise.Algodiff.t array array
val mkpri : network -> Neuron.Optimise.Algodiff.t array array
val mkadj : network -> Neuron.Optimise.Algodiff.t array array
val update : network -> Neuron.Optimise.Algodiff.t array array -> unit
val run_inputs : Neuron.Optimise.Algodiff.t array -> network -> Neuron.Optimise.Algodiff.t array
val forward_inputs : network -> Neuron.Optimise.Algodiff.t array -> Neuron.Optimise.Algodiff.t array * Neuron.Optimise.Algodiff.t array array
val backward : network -> Neuron.Optimise.Algodiff.t -> Neuron.Optimise.Algodiff.t array array * Neuron.Optimise.Algodiff.t array array
val copy : network -> network
val model_inputs : network -> Neuron.Optimise.Algodiff.A.arr array -> Neuron.Optimise.Algodiff.A.arr array
val input : ?name:string -> int array -> node
val inputs : ?names:string array -> int array array -> node array
val activation : ?name:string -> Neuron.Activation.typ -> node -> node
val linear : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> node -> node
val linear_nobias : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> node -> node
val embedding : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> int -> node -> node
val recurrent : ?name:string -> ?init_typ:Neuron.Init.typ -> act_typ:Neuron.Activation.typ -> int -> int -> node -> node
val lstm : ?name:string -> ?init_typ:Neuron.Init.typ -> int -> node -> node
val gru : ?name:string -> ?init_typ:Neuron.Init.typ -> int -> node -> node
val conv1d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val conv2d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val conv3d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val dilated_conv1d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> int array -> node -> node
val dilated_conv2d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> int array -> node -> node
val dilated_conv3d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> int array -> node -> node
val transpose_conv1d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val transpose_conv2d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val transpose_conv3d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val fully_connected : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> node -> node
val max_pool1d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val max_pool2d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val avg_pool1d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val avg_pool2d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val global_max_pool1d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val global_max_pool2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val global_avg_pool1d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val global_avg_pool2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val upsampling2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> int array -> node -> node
val padding2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> int array array -> node -> node
val dropout : ?name:string -> float -> node -> node
val gaussian_noise : ?name:string -> float -> node -> node
val gaussian_dropout : ?name:string -> float -> node -> node
val alpha_dropout : ?name:string -> float -> node -> node
val normalisation : ?name:string -> ?axis:int -> ?training:bool -> ?decay:float -> ?mu:Neuron.Optimise.Algodiff.A.arr -> ?var:Neuron.Optimise.Algodiff.A.arr -> node -> node
val reshape : ?name:string -> int array -> node -> node
val flatten : ?name:string -> node -> node
val slice : ?name:string -> int list list -> node -> node
val lambda : ?name:string -> ?act_typ:Neuron.Activation.typ -> ?out_shape:int array -> (Neuron.Optimise.Algodiff.t -> Neuron.Optimise.Algodiff.t) -> node -> node
val lambda_array : ?name:string -> ?act_typ:Neuron.Activation.typ -> int array -> (Neuron.Optimise.Algodiff.t array -> Neuron.Optimise.Algodiff.t) -> node array -> node
val add : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val mul : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val dot : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val max : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val average : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val concatenate : ?name:string -> ?act_typ:Neuron.Activation.typ -> int -> node array -> node
val to_string : network -> string
val pp_network : Stdlib.Format.formatter -> network -> unit
val print : network -> unit
val save : ?unsafe:bool -> network -> string -> unit
val load : string -> network
val save_weights : network -> string -> unit
val load_weights : network -> string -> unit
val make_subnetwork : ?copy:bool -> ?make_inputs:string array -> network -> string array -> network
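A minimal sketch of how these constructors compose into a network. It assumes the input, conv2d and get_network values listed earlier in this signature (not repeated above), and is illustrative rather than the library's canonical example:
(* Build a tiny convolutional classifier; Relu and Softmax come from Neuron.Activation.typ. *)
let make_tiny_cnn () =
  input [| 28; 28; 1 |]
  |> conv2d ~act_typ:Neuron.Activation.Relu [| 3; 3; 1; 32 |] [| 1; 1 |]
  |> max_pool2d [| 2; 2 |] [| 2; 2 |]
  |> flatten
  |> fully_connected ~act_typ:(Neuron.Activation.Softmax 1) 10
  |> get_network
The resulting network can then be inspected with print above, or persisted with save and save_weights.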
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/argument-1-A/Linalg/index.html b/owl-base/Owl_neural_generic/Make/argument-1-A/Linalg/index.html deleted file mode 100644 index 935b7b0c2..000000000 --- a/owl-base/Owl_neural_generic/Make/argument-1-A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_neural_generic.Make.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
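A short usage sketch, assuming this parameter is instantiated with one of Owl's dense ndarray modules so the values above are concretely available:
(* Solve a * x = b, then take a thin SVD of the same matrix. *)
let solve_and_factor a b =
  let x = linsolve a b in
  let u, s, vt = svd ~thin:true a in
  x, u, s, vt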
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/argument-1-A/Mat/index.html b/owl-base/Owl_neural_generic/Make/argument-1-A/Mat/index.html deleted file mode 100644 index 4fa24cd30..000000000 --- a/owl-base/Owl_neural_generic/Make/argument-1-A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_generic.Make.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/argument-1-A/Scalar/index.html b/owl-base/Owl_neural_generic/Make/argument-1-A/Scalar/index.html deleted file mode 100644 index 933d01a87..000000000 --- a/owl-base/Owl_neural_generic/Make/argument-1-A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_neural_generic.Make.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/argument-1-A/index.html b/owl-base/Owl_neural_generic/Make/argument-1-A/index.html deleted file mode 100644 index 50cfe11b4..000000000 --- a/owl-base/Owl_neural_generic/Make/argument-1-A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_neural_generic.Make.A)

Parameter Make.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
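A small sketch of the convolution and pooling primitives above, assuming this parameter is filled by an Owl dense float64 ndarray module and that Owl_types_common.padding has the usual SAME / VALID constructors:
(* One SAME-padded 2-d convolution followed by 2x2 max pooling. *)
let conv_then_pool () =
  let x = uniform [| 1; 28; 28; 1 |] in                            (* batch; rows; cols; channels *)
  let k = gaussian ~sigma:(float_to_elt 0.1) [| 3; 3; 1; 8 |] in   (* kh; kw; in_chan; out_chan *)
  let y = conv2d ~padding:Owl_types_common.SAME x k [| 1; 1 |] in
  max_pool2d y [| 2; 2 |] [| 2; 2 |]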
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make/index.html b/owl-base/Owl_neural_generic/Make/index.html deleted file mode 100644 index 689b408ae..000000000 --- a/owl-base/Owl_neural_generic/Make/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Make (owl-base.Owl_neural_generic.Make)

Module Owl_neural_generic.Make

Parameters

Signature

include sig ... end
module Graph : sig ... end
module Optimise = Graph.Neuron.Optimise
module Init = Graph.Neuron.Init
module Activation = Graph.Neuron.Activation
module Regularisation = Graph.Neuron.Optimise.Regularisation
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Activation/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Activation/index.html deleted file mode 100644 index f8ed071fd..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Activation/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Activation (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Activation)

Module Neuron.Activation

type typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Activation.typ =
  1. | Elu
  2. | Relu
  3. | Sigmoid
  4. | HardSigmoid
  5. | Softmax of int
  6. | Softplus
  7. | Softsign
  8. | Tanh
  9. | Relu6
  10. | LeakyRelu of float
  11. | TRelu of float
  12. | Custom of Optimise.Algodiff.t -> Optimise.Algodiff.t
  13. | None
type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Activation.neuron_typ = {
  1. mutable activation : typ;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val run_activation : Optimise.Algodiff.t -> typ -> Optimise.Algodiff.t
val copy : neuron_typ -> neuron_typ
val activation_to_string : typ -> string
val to_string : neuron_typ -> string
val to_name : unit -> string
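A hedged sketch of driving this module directly, assuming pack_arr from Optimise.Algodiff for lifting a plain ndarray into an Algodiff value:
(* Create a Relu activation neuron, connect it to a 128-wide input and run a value through it. *)
let relu_demo x_arr =
  let n = create Relu in
  connect [| 128 |] n;
  run_activation (Optimise.Algodiff.pack_arr x_arr) n.activation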
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Add/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Add/index.html deleted file mode 100644 index 48f1247a3..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Add/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Add (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Add)

Module Neuron.Add

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Add.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/AlphaDropout/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/AlphaDropout/index.html deleted file mode 100644 index 96b5b3ec5..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/AlphaDropout/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -AlphaDropout (owl-base.Owl_neural_generic.Make_Embedded.Neuron.AlphaDropout)

Module Neuron.AlphaDropout

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).AlphaDropout.neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Average/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Average/index.html deleted file mode 100644 index 21b380a47..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Average/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Average (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Average)

Module Neuron.Average

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Average.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/AvgPool1D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/AvgPool1D/index.html deleted file mode 100644 index 4d05a1f25..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/AvgPool1D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -AvgPool1D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.AvgPool1D)

Module Neuron.AvgPool1D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).AvgPool1D.neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/AvgPool2D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/AvgPool2D/index.html deleted file mode 100644 index c2dfaa6a9..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/AvgPool2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -AvgPool2D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.AvgPool2D)

Module Neuron.AvgPool2D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).AvgPool2D.neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Concatenate/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Concatenate/index.html deleted file mode 100644 index 6d534e97f..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Concatenate/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Concatenate (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Concatenate)

Module Neuron.Concatenate

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Concatenate.neuron_typ = {
  1. mutable axis : int;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Conv1D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Conv1D/index.html deleted file mode 100644 index 3fd1c8ad3..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Conv1D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -Conv1D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Conv1D)

Module Neuron.Conv1D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Conv1D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Conv2D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Conv2D/index.html deleted file mode 100644 index 745df39ce..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Conv2D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -Conv2D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Conv2D)

Module Neuron.Conv2D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Conv2D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
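A hedged sketch of assembling this neuron outside a graph, assuming Owl_types.SAME for the padding and GlorotUniform from the Init module above:
(* A 3x3, 1 -> 32 channel Conv2D neuron over 28x28x1 inputs. *)
let make_conv () =
  let n = create Owl_types.SAME [| 3; 3; 1; 32 |] [| 1; 1 |] Init.GlorotUniform in
  connect [| 28; 28; 1 |] n;
  init n;   (* initialises w and b according to init_typ *)
  n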
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Conv3D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Conv3D/index.html deleted file mode 100644 index f2db93d46..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Conv3D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -Conv3D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Conv3D)

Module Neuron.Conv3D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Conv3D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/DilatedConv1D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/DilatedConv1D/index.html deleted file mode 100644 index d191632a0..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/DilatedConv1D/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -DilatedConv1D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.DilatedConv1D)

Module Neuron.DilatedConv1D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).DilatedConv1D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/DilatedConv2D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/DilatedConv2D/index.html deleted file mode 100644 index f4a05f134..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/DilatedConv2D/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -DilatedConv2D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.DilatedConv2D)

Module Neuron.DilatedConv2D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).DilatedConv2D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/DilatedConv3D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/DilatedConv3D/index.html deleted file mode 100644 index 414001a61..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/DilatedConv3D/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -DilatedConv3D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.DilatedConv3D)

Module Neuron.DilatedConv3D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).DilatedConv3D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Dot/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Dot/index.html deleted file mode 100644 index a2a781498..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Dot/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Dot (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Dot)

Module Neuron.Dot

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Dot.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Dropout/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Dropout/index.html deleted file mode 100644 index f4aba9186..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Dropout/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Dropout (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Dropout)

Module Neuron.Dropout

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Dropout.neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Embedding/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Embedding/index.html deleted file mode 100644 index b304dd8f3..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Embedding/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Embedding (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Embedding)

Module Neuron.Embedding

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Embedding.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_dim : int;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Flatten/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Flatten/index.html deleted file mode 100644 index f87b220b1..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Flatten/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Flatten (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Flatten)

Module Neuron.Flatten

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Flatten.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/FullyConnected/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/FullyConnected/index.html deleted file mode 100644 index 78a645399..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/FullyConnected/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -FullyConnected (owl-base.Owl_neural_generic.Make_Embedded.Neuron.FullyConnected)

Module Neuron.FullyConnected

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).FullyConnected.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
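A hedged sketch for this neuron in isolation, assuming Standard from the Init module as the initialiser:
(* A 784 -> 128 fully connected neuron. *)
let make_fc () =
  let n = create 128 Init.Standard in
  connect [| 784 |] n;
  init n;
  n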
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GRU/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GRU/index.html deleted file mode 100644 index 353423b32..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GRU/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GRU (owl-base.Owl_neural_generic.Make_Embedded.Neuron.GRU)

Module Neuron.GRU

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).GRU.neuron_typ = {
  1. mutable wxz : Optimise.Algodiff.t;
  2. mutable whz : Optimise.Algodiff.t;
  3. mutable wxr : Optimise.Algodiff.t;
  4. mutable whr : Optimise.Algodiff.t;
  5. mutable wxh : Optimise.Algodiff.t;
  6. mutable whh : Optimise.Algodiff.t;
  7. mutable bz : Optimise.Algodiff.t;
  8. mutable br : Optimise.Algodiff.t;
  9. mutable bh : Optimise.Algodiff.t;
  10. mutable h : Optimise.Algodiff.t;
  11. mutable init_typ : Init.typ;
  12. mutable in_shape : int array;
  13. mutable out_shape : int array;
}
val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GaussianDropout/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GaussianDropout/index.html deleted file mode 100644 index c16f0241e..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GaussianDropout/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GaussianDropout (owl-base.Owl_neural_generic.Make_Embedded.Neuron.GaussianDropout)

Module Neuron.GaussianDropout

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).GaussianDropout.neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GaussianNoise/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GaussianNoise/index.html deleted file mode 100644 index a02fa98ef..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GaussianNoise/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GaussianNoise (owl-base.Owl_neural_generic.Make_Embedded.Neuron.GaussianNoise)

Module Neuron.GaussianNoise

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).GaussianNoise.neuron_typ = {
  1. mutable sigma : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GlobalAvgPool1D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GlobalAvgPool1D/index.html deleted file mode 100644 index 286dd7929..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GlobalAvgPool1D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GlobalAvgPool1D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.GlobalAvgPool1D)

Module Neuron.GlobalAvgPool1D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).GlobalAvgPool1D.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GlobalAvgPool2D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GlobalAvgPool2D/index.html deleted file mode 100644 index 3b1232dde..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GlobalAvgPool2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GlobalAvgPool2D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.GlobalAvgPool2D)

Module Neuron.GlobalAvgPool2D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).GlobalAvgPool2D.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GlobalMaxPool1D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GlobalMaxPool1D/index.html deleted file mode 100644 index aee4345c7..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GlobalMaxPool1D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GlobalMaxPool1D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.GlobalMaxPool1D)

Module Neuron.GlobalMaxPool1D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).GlobalMaxPool1D.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GlobalMaxPool2D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GlobalMaxPool2D/index.html deleted file mode 100644 index 7bfd84d16..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/GlobalMaxPool2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GlobalMaxPool2D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.GlobalMaxPool2D)

Module Neuron.GlobalMaxPool2D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).GlobalMaxPool2D.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Init/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Init/index.html deleted file mode 100644 index 5f1d15bac..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Init/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Init (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Init)

Module Neuron.Init

type typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Init.typ =
  1. | Uniform of float * float
  2. | Gaussian of float * float
  3. | Standard
  4. | Tanh
  5. | GlorotNormal
  6. | GlorotUniform
  7. | LecunNormal
  8. | HeNormal
  9. | Custom of int array -> Optimise.Algodiff.t
val calc_fans : int array -> float * float
val run : typ -> int array -> Optimise.Algodiff.t -> Optimise.Algodiff.t
val to_string : typ -> string
val to_name : unit -> string
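A small illustration of the helpers above; the shape is a hypothetical 3x3, 1 -> 32 channel convolution kernel:
(* Fan-in / fan-out as consumed by the Glorot-style variants of typ. *)
let fan_in, fan_out = calc_fans [| 3; 3; 1; 32 |]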
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Input/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Input/index.html deleted file mode 100644 index e45568e82..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Input/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Input (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Input)

Module Neuron.Input

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Input.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : int array -> neuron_typ
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/LSTM/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/LSTM/index.html deleted file mode 100644 index c3188fc09..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/LSTM/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -LSTM (owl-base.Owl_neural_generic.Make_Embedded.Neuron.LSTM)

Module Neuron.LSTM

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).LSTM.neuron_typ = {
  1. mutable wxi : Optimise.Algodiff.t;
  2. mutable whi : Optimise.Algodiff.t;
  3. mutable wxc : Optimise.Algodiff.t;
  4. mutable whc : Optimise.Algodiff.t;
  5. mutable wxf : Optimise.Algodiff.t;
  6. mutable whf : Optimise.Algodiff.t;
  7. mutable wxo : Optimise.Algodiff.t;
  8. mutable who : Optimise.Algodiff.t;
  9. mutable bi : Optimise.Algodiff.t;
  10. mutable bc : Optimise.Algodiff.t;
  11. mutable bf : Optimise.Algodiff.t;
  12. mutable bo : Optimise.Algodiff.t;
  13. mutable c : Optimise.Algodiff.t;
  14. mutable h : Optimise.Algodiff.t;
  15. mutable init_typ : Init.typ;
  16. mutable in_shape : int array;
  17. mutable out_shape : int array;
}
val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Lambda/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Lambda/index.html deleted file mode 100644 index 1e93e7ee5..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Lambda/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Lambda (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Lambda)

Module Neuron.Lambda

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Lambda.neuron_typ = {
  1. mutable lambda : Optimise.Algodiff.t -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : ?out_shape:int array -> (Optimise.Algodiff.t -> Optimise.Algodiff.t) -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
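A hedged sketch, assuming Maths.sin from Optimise.Algodiff as the wrapped function (?out_shape is omitted here):
(* A Lambda neuron applying element-wise sine over a 10-element input. *)
let sine_neuron () =
  let n = create Optimise.Algodiff.Maths.sin in
  connect [| 10 |] n;
  n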
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/LambdaArray/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/LambdaArray/index.html deleted file mode 100644 index 1ca7d3861..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/LambdaArray/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -LambdaArray (owl-base.Owl_neural_generic.Make_Embedded.Neuron.LambdaArray)

Module Neuron.LambdaArray

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).LambdaArray.neuron_typ = {
  1. mutable lambda : Optimise.Algodiff.t array -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int array -> (Optimise.Algodiff.t array -> Optimise.Algodiff.t) -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Linear/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Linear/index.html deleted file mode 100644 index a6c1f5094..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Linear/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Linear (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Linear)

Module Neuron.Linear

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Linear.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/LinearNoBias/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/LinearNoBias/index.html deleted file mode 100644 index cf88c53a2..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/LinearNoBias/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -LinearNoBias (owl-base.Owl_neural_generic.Make_Embedded.Neuron.LinearNoBias)

Module Neuron.LinearNoBias

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).LinearNoBias.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_shape : int array;
  4. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Masking/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Masking/index.html deleted file mode 100644 index d4389884f..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Masking/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Masking (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Masking)

Module Neuron.Masking

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Max/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Max/index.html deleted file mode 100644 index 703d09b22..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Max/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Max (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Max)

Module Neuron.Max

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Max.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/MaxPool1D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/MaxPool1D/index.html deleted file mode 100644 index 0d56b68a6..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/MaxPool1D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -MaxPool1D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.MaxPool1D)

Module Neuron.MaxPool1D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).MaxPool1D.neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/MaxPool2D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/MaxPool2D/index.html deleted file mode 100644 index 1efed1b11..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/MaxPool2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -MaxPool2D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.MaxPool2D)

Module Neuron.MaxPool2D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).MaxPool2D.neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Mul/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Mul/index.html deleted file mode 100644 index 0fd799c9a..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Mul/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Mul (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Mul)

Module Neuron.Mul

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Mul.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Normalisation/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Normalisation/index.html deleted file mode 100644 index e0d288815..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Normalisation/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -Normalisation (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Normalisation)

Module Neuron.Normalisation

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Normalisation.neuron_typ = {
  1. mutable axis : int;
  2. mutable beta : Optimise.Algodiff.t;
  3. mutable gamma : Optimise.Algodiff.t;
  4. mutable mu : Optimise.Algodiff.t;
  5. mutable var : Optimise.Algodiff.t;
  6. mutable decay : Optimise.Algodiff.t;
  7. mutable training : bool;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : ?training:bool -> ?decay:float -> ?mu:Optimise.Algodiff.A.arr -> ?var:Optimise.Algodiff.A.arr -> int -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val load_weights : neuron_typ -> Optimise.Algodiff.t array -> unit
val save_weights : neuron_typ -> Optimise.Algodiff.t array
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/A/Linalg/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/A/Linalg/index.html deleted file mode 100644 index 0b7f701f8..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/A/Mat/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/A/Mat/index.html deleted file mode 100644 index fee880d29..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/A/Scalar/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/A/Scalar/index.html deleted file mode 100644 index 3533de423..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/A/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/A/index.html deleted file mode 100644 index f0778ba1d..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/A/index.html +++ /dev/null @@ -1,160 +0,0 @@ - -A (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Algodiff.A)

Module Algodiff.A

val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
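A minimal sketch of how this ndarray signature is used, assuming a module A that implements it; every call below is listed on this page, and the shapes are arbitrary illustrative choices.
let demo () =
  let x = A.sequential ~a:(A.float_to_elt 0.) [| 2; 3 |] in   (* 0,1,...,5 laid out as 2x3 *)
  let y = A.mul_scalar x (A.float_to_elt 10.) in              (* elementwise scaling *)
  let z = A.reshape y [| 3; 2 |] in
  A.print z;
  A.sum' z                                                    (* total of all elements *)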
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Arr/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Arr/index.html deleted file mode 100644 index 8e2e095cb..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
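A short sketch, using only the functions listed above: Arr builds differentiable ndarray values of type t directly, so the result of dot can feed straight into Maths or NN operations.
let z =
  let x = Arr.uniform [| 3; 4 |] in
  let y = Arr.gaussian [| 4; 2 |] in
  Arr.dot x y                          (* 3x2 result, still of type t *)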
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/index.html deleted file mode 100644 index 837cf8cf4..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Algodiff.Builder)

Module Algodiff.Builder

module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t
module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t
module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t
module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array
module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t
module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t
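A sketch only, not taken from this page: build_siso lifts a single-input single-output operation into a differentiable function, given forward and derivative rules packaged as a first-class module of type Siso (documented below). It assumes this Algodiff module is open so that the constructors F and Arr, plus Maths, Builder, primal and the primal-ops argument A, are in scope; the argument conventions of df and dr are paraphrased in the comments and should be checked against the Siso page.
let custom_sin =
  Builder.build_siso
    (module struct
      let label = "custom_sin"
      let ff_f a = F (A.Scalar.sin a)                  (* forward pass on a scalar *)
      let ff_arr a = Arr (A.sin a)                     (* forward pass on an ndarray *)
      let df _cp ap at = Maths.(at * cos ap)           (* tangent rule: sin'(x) = cos x *)
      let dr a _cp ca = Maths.(!ca * cos (primal a))   (* adjoint contribution in reverse mode *)
    end : Builder.Siso)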
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html deleted file mode 100644 index c11ac0247..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html deleted file mode 100644 index 460c739af..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html deleted file mode 100644 index 6a99870ee..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html deleted file mode 100644 index cce5e8d84..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html deleted file mode 100644 index 02edaf23f..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html deleted file mode 100644 index 1d45fb46a..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Linalg/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Linalg/index.html deleted file mode 100644 index ffc1fbc66..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t
val logdet : t -> t
val chol : ?upper:bool -> t -> t
val qr : t -> t * t
val lq : t -> t * t
val svd : ?thin:bool -> t -> t * t * t
val sylvester : t -> t -> t -> t
val lyapunov : t -> t -> t
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> t -> t -> t
val (/@) : t -> t -> t
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> t -> t -> t
val care : ?diag_r:bool -> t -> t -> t -> t -> t
val dare : ?diag_r:bool -> t -> t -> t -> t -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Mat/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Mat/index.html deleted file mode 100644 index dd24581ec..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Maths/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Maths/index.html deleted file mode 100644 index 6c61e472b..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t
val (-) : t -> t -> t
val (*) : t -> t -> t
val (/) : t -> t -> t
val (*@) : t -> t -> t
val (**) : t -> t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val kron : t -> t -> t
val dot : t -> t -> t
val pow : t -> t -> t
val atan2 : t -> t -> t
val min2 : t -> t -> t
val max2 : t -> t -> t
val cross_entropy : t -> t -> t
val inv : t -> t
val neg : t -> t
val abs : t -> t
val signum : t -> t
val floor : t -> t
val ceil : t -> t
val round : t -> t
val sqr : t -> t
val sqrt : t -> t
val log : t -> t
val log2 : t -> t
val log10 : t -> t
val exp : t -> t
val sin : t -> t
val cos : t -> t
val tan : t -> t
val sinh : t -> t
val cosh : t -> t
val tanh : t -> t
val asin : t -> t
val acos : t -> t
val atan : t -> t
val asinh : t -> t
val acosh : t -> t
val atanh : t -> t
val sum' : t -> t
val log_sum_exp' : t -> t
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t
val sum : ?axis:int -> ?keep_dims:bool -> t -> t
val sum_reduce : ?axis:int array -> t -> t
val mean : t -> t
val transpose : ?axis:int array -> t -> t
val swap : int -> int -> t -> t
val l1norm' : t -> t
val l2norm' : t -> t
val l2norm_sqr' : t -> t
val sigmoid : t -> t
val relu : t -> t
val dawsn : t -> t
val softplus : t -> t
val softsign : t -> t
val softmax : ?axis:int -> t -> t
val reshape : t -> int array -> t
val flatten : t -> t
val get_item : t -> int -> int -> t
val get_row : t -> int -> t
val concat : axis:int -> t -> t -> t
val split : axis:int -> int array -> t -> t array
val of_arrays : t array array -> t
val to_arrays : t -> t array array
val concatenate : axis:int -> t array -> t
val stack : axis:int -> t array -> t
val get_slice : int list list -> t -> t
val set_slice : int list list -> t -> t -> t
val get_fancy : Owl_types.index list -> t -> t
val set_fancy : Owl_types.index list -> t -> t -> t
val diag : ?k:int -> t -> t
val diagm : ?k:int -> t -> t
val trace : t -> t
val triu : ?k:int -> t -> t
val tril : ?k:int -> t -> t
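A sketch of how the operators above compose: because everything works on t, the same expression serves scalar and ndarray arguments alike. The shapes of w, x and y are assumed compatible for matrix multiplication; the exact argument order of cross_entropy should be checked against its documentation.
let logistic_loss w x y =
  (* a logistic-regression style loss written once over differentiable values *)
  Maths.(cross_entropy y (sigmoid (x *@ w)))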
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/NN/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/NN/index.html deleted file mode 100644 index 7dc71fa4f..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t
val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val upsampling2d : t -> int array -> t
val pad : ?v:A.elt -> int list list -> t -> t
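A sketch of a single convolution-plus-pooling step on differentiable values, assuming SAME and VALID are the constructors of Owl_types.padding and using the Arr module documented above to build the kernel; input shape is an illustrative assumption.
let conv_block x =
  (* x : t, assumed shape [|batch; 28; 28; 1|] in NHWC layout *)
  let kernel = Arr.uniform [| 3; 3; 1; 8 |] in
  let y = NN.conv2d ~padding:Owl_types.SAME x kernel [| 1; 1 |] in
  NN.max_pool2d Owl_types.VALID y [| 2; 2 |] [| 2; 2 |]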
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/index.html deleted file mode 100644 index b1d3ba658..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Algodiff/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Algodiff (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Algodiff)

Module Optimise.Algodiff

module A : sig ... end
type t = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Optimise.Algodiff.t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
val tag : unit -> int
val primal : t -> t
val primal' : t -> t
val zero : t -> t
val reset_zero : t -> t
val tangent : t -> t
val adjref : t -> t Stdlib.ref
val adjval : t -> t
val shape : t -> int array
val is_float : t -> bool
val is_arr : t -> bool
val row_num : t -> int
val col_num : t -> int
val numel : t -> int
val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t
val clip_by_l2norm : A.elt -> t -> t
val copy_primal' : t -> t
val tile : t -> int array -> t
val repeat : t -> int array -> t
val pack_elt : A.elt -> t
val unpack_elt : t -> A.elt
val pack_flt : float -> t
val _f : float -> t
val unpack_flt : t -> float
val pack_arr : A.arr -> t
val unpack_arr : t -> A.arr
val deep_info : t -> string
val type_info : t -> string
val error_binop : string -> t -> t -> 'a
val error_uniop : string -> t -> 'a
val make_forward : t -> t -> int -> t
val make_reverse : t -> int -> t
val reverse_prop : t -> t -> unit
val diff : (t -> t) -> t -> t
val diff' : (t -> t) -> t -> t * t
val grad : (t -> t) -> t -> t
val grad' : (t -> t) -> t -> t * t
val jacobian : (t -> t) -> t -> t
val jacobian' : (t -> t) -> t -> t * t
val jacobianv : (t -> t) -> t -> t -> t
val jacobianv' : (t -> t) -> t -> t -> t * t
val jacobianTv : (t -> t) -> t -> t -> t
val jacobianTv' : (t -> t) -> t -> t -> t * t
val hessian : (t -> t) -> t -> t
val hessian' : (t -> t) -> t -> t * t
val hessianv : (t -> t) -> t -> t -> t
val hessianv' : (t -> t) -> t -> t -> t * t
val laplacian : (t -> t) -> t -> t
val laplacian' : (t -> t) -> t -> t * t
val gradhessian : (t -> t) -> t -> t * t
val gradhessian' : (t -> t) -> t -> t * t * t
val gradhessianv : (t -> t) -> t -> t -> t * t
val gradhessianv' : (t -> t) -> t -> t -> t * t * t
module Builder : sig ... end
module Maths : sig ... end
module Linalg : sig ... end
module NN : sig ... end
module Mat : sig ... end
module Arr : sig ... end
val to_trace : t list -> string
val to_dot : t list -> string
val pp_num : Stdlib.Format.formatter -> t -> unit
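A sketch only, assuming this Algodiff module has been aliased as AD (Owl.Algodiff.D in the full owl package is one concrete instance of it); it uses diff, grad, pack_flt, unpack_flt and the Maths and Mat submodules listed above.
let () =
  (* derivative of a scalar function at a point *)
  let f x = AD.Maths.(sin x * exp (neg (sqr x))) in
  let dfdx = AD.diff f (AD.pack_flt 0.3) in
  Printf.printf "f'(0.3) = %g\n" (AD.unpack_flt dfdx);
  (* gradient of a scalar-valued function at a random 3x1 matrix *)
  let g w = AD.Maths.l2norm_sqr' w in
  let gw = AD.grad g (AD.Mat.gaussian 3 1) in
  ignore gw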
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Batch/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Batch/index.html deleted file mode 100644 index 8a437d15b..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Batch/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Batch (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Batch)

Module Optimise.Batch

val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val batches : typ -> Algodiff.t -> int
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Checkpoint/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Checkpoint/index.html deleted file mode 100644 index 4d0ca8a6a..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Checkpoint/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Checkpoint (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Checkpoint)

Module Optimise.Checkpoint

type state = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Optimise.Checkpoint.state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}
type typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Optimise.Checkpoint.typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of state -> unit
  4. | None
val init_state : int -> float -> state
val default_checkpoint_fun : (string -> 'a) -> 'a
val print_state_info : state -> unit
val print_summary : state -> unit
val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Clipping/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Clipping/index.html deleted file mode 100644 index 6fb814733..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Clipping/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Clipping (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Clipping)

Module Optimise.Clipping

val run : typ -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Gradient/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Gradient/index.html deleted file mode 100644 index c4fba17fd..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Gradient/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Gradient (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Gradient)

Module Optimise.Gradient

type typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Optimise.Gradient.typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton
val run : typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Learning_Rate/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Learning_Rate/index.html deleted file mode 100644 index ddd1bce3b..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Learning_Rate/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Learning_Rate (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Learning_Rate)

Module Optimise.Learning_Rate

type typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Optimise.Learning_Rate.typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t
val default : typ -> typ
val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Loss/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Loss/index.html deleted file mode 100644 index f97ada9ed..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Loss/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Loss (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Loss)

Module Optimise.Loss

type typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Optimise.Loss.typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of Algodiff.t -> Algodiff.t -> Algodiff.t
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Momentum/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Momentum/index.html deleted file mode 100644 index a5d0c6c02..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Momentum/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Momentum (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Momentum)

Module Optimise.Momentum

val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Params/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Params/index.html deleted file mode 100644 index fa1c2b15f..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Params/index.html +++ /dev/null @@ -1,16 +0,0 @@ - -Params (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Params)

Module Optimise.Params

type typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Optimise.Params.typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}
val default : unit -> typ
val config : ?batch:Batch.typ -> ?gradient:Gradient.typ -> ?loss:Loss.typ -> ?learning_rate:Learning_Rate.typ -> ?regularisation:Regularisation.typ -> ?momentum:Momentum.typ -> ?clipping:Clipping.typ -> ?stopping:Stopping.typ -> ?checkpoint:Checkpoint.typ -> ?verbosity:bool -> float -> typ
val to_string : typ -> string
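A sketch of assembling an optimisation configuration from the constructors listed on the surrounding pages (Gradient.GD, Loss.Quadratic, Learning_Rate.Adam); the trailing float is the number of epochs, and the specific values are illustrative assumptions.
let params =
  Params.config
    ~gradient:Gradient.GD
    ~loss:Loss.Quadratic
    ~learning_rate:(Learning_Rate.Adam (0.01, 0.9, 0.999))
    ~verbosity:true
    50.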
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Regularisation/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Regularisation/index.html deleted file mode 100644 index c164be4d2..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Regularisation/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Regularisation (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Regularisation)

Module Optimise.Regularisation

type typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Optimise.Regularisation.typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None
val run : typ -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Stopping/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Stopping/index.html deleted file mode 100644 index 1621d3282..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Stopping/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Stopping (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Stopping)

Module Optimise.Stopping

val run : typ -> float -> bool
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Utils/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Utils/index.html deleted file mode 100644 index 4a59b1f59..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise.Utils)

Module Optimise.Utils

val sample_num : Algodiff.t -> int
val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val get_chunk : Algodiff.t -> Algodiff.t -> int -> int -> Algodiff.t * Algodiff.t
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/index.html deleted file mode 100644 index 3a79997d8..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Optimise/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -Optimise (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Optimise)

Module Neuron.Optimise

module Algodiff : sig ... end
module Utils : sig ... end
module Learning_Rate : sig ... end
module Batch : sig ... end
module Loss : sig ... end
module Gradient : sig ... end
module Momentum : sig ... end
module Regularisation : sig ... end
module Clipping : sig ... end
module Stopping : sig ... end
module Checkpoint : sig ... end
module Params : sig ... end
val minimise_weight : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> (Algodiff.t array array -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
val minimise_fun : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_compiled_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> (unit -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
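A sketch of minimise_fun on a hand-written scalar objective, assuming this Optimise module (the functor result) is in scope; the learning rate, epoch count and starting shape are illustrative assumptions.
let minimise_quadratic () =
  let params = Params.config ~learning_rate:(Learning_Rate.Const 0.1) 100. in
  let f x = Algodiff.Maths.l2norm_sqr' x in        (* objective: squared L2 norm *)
  let x0 = Algodiff.Mat.gaussian 3 1 in            (* random starting point *)
  let _state, x_min = minimise_fun params f x0 in
  x_min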
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Padding1D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Padding1D/index.html deleted file mode 100644 index e2269c875..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Padding1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding1D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Padding1D)

Module Neuron.Padding1D

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Padding2D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Padding2D/index.html deleted file mode 100644 index 60e876396..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Padding2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Padding2D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Padding2D)

Module Neuron.Padding2D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Padding2D.neuron_typ = {
  1. mutable padding : int array array;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int array array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Padding3D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Padding3D/index.html deleted file mode 100644 index 76eac08a2..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Padding3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding3D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Padding3D)

Module Neuron.Padding3D

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Recurrent/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Recurrent/index.html deleted file mode 100644 index a257ab26d..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Recurrent/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Recurrent (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Recurrent)

Module Neuron.Recurrent

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Recurrent.neuron_typ = {
  1. mutable whh : Optimise.Algodiff.t;
  2. mutable wxh : Optimise.Algodiff.t;
  3. mutable why : Optimise.Algodiff.t;
  4. mutable bh : Optimise.Algodiff.t;
  5. mutable by : Optimise.Algodiff.t;
  6. mutable h : Optimise.Algodiff.t;
  7. mutable hiddens : int;
  8. mutable act : Activation.typ;
  9. mutable init_typ : Init.typ;
  10. mutable in_shape : int array;
  11. mutable out_shape : int array;
}
val create : ?time_steps:int -> ?inputs:int -> int -> int -> Activation.typ -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Reshape/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Reshape/index.html deleted file mode 100644 index f056e35b9..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Reshape/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Reshape (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Reshape)

Module Neuron.Reshape

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Reshape.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : ?inputs:int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Slice/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Slice/index.html deleted file mode 100644 index 3e95f4b98..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/Slice/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Slice (owl-base.Owl_neural_generic.Make_Embedded.Neuron.Slice)

Module Neuron.Slice

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).Slice.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
  3. mutable slice : int list list;
}
val create : int list list -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/TransposeConv1D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/TransposeConv1D/index.html deleted file mode 100644 index 289f89732..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/TransposeConv1D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -TransposeConv1D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.TransposeConv1D)

Module Neuron.TransposeConv1D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).TransposeConv1D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/TransposeConv2D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/TransposeConv2D/index.html deleted file mode 100644 index ddc7f3926..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/TransposeConv2D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -TransposeConv2D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.TransposeConv2D)

Module Neuron.TransposeConv2D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).TransposeConv2D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/TransposeConv3D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/TransposeConv3D/index.html deleted file mode 100644 index fac2cf27e..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/TransposeConv3D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -TransposeConv3D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.TransposeConv3D)

Module Neuron.TransposeConv3D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).TransposeConv3D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/UpSampling1D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/UpSampling1D/index.html deleted file mode 100644 index f042c3be7..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/UpSampling1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling1D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.UpSampling1D)

Module Neuron.UpSampling1D

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/UpSampling2D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/UpSampling2D/index.html deleted file mode 100644 index e2fb09be2..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/UpSampling2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -UpSampling2D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.UpSampling2D)

Module Neuron.UpSampling2D

type neuron_typ = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).UpSampling2D.neuron_typ = {
  1. mutable size : int array;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/UpSampling3D/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/UpSampling3D/index.html deleted file mode 100644 index 111713578..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/UpSampling3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling3D (owl-base.Owl_neural_generic.Make_Embedded.Neuron.UpSampling3D)

Module Neuron.UpSampling3D

\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/index.html b/owl-base/Owl_neural_generic/Make_Embedded/Neuron/index.html deleted file mode 100644 index 73ac6fec8..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/Neuron/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Neuron (owl-base.Owl_neural_generic.Make_Embedded.Neuron)

Module Make_Embedded.Neuron

module Optimise : sig ... end
module Init : sig ... end
module Input : sig ... end
module Activation : sig ... end
module Linear : sig ... end
module LinearNoBias : sig ... end
module Recurrent : sig ... end
module LSTM : sig ... end
module GRU : sig ... end
module Conv1D : sig ... end
module Conv2D : sig ... end
module Conv3D : sig ... end
module DilatedConv1D : sig ... end
module DilatedConv2D : sig ... end
module DilatedConv3D : sig ... end
module TransposeConv1D : sig ... end
module TransposeConv2D : sig ... end
module TransposeConv3D : sig ... end
module FullyConnected : sig ... end
module MaxPool1D : sig ... end
module MaxPool2D : sig ... end
module AvgPool1D : sig ... end
module AvgPool2D : sig ... end
module GlobalMaxPool1D : sig ... end
module GlobalMaxPool2D : sig ... end
module GlobalAvgPool1D : sig ... end
module GlobalAvgPool2D : sig ... end
module UpSampling1D : sig ... end
module UpSampling2D : sig ... end
module UpSampling3D : sig ... end
module Padding1D : sig ... end
module Padding2D : sig ... end
module Padding3D : sig ... end
module Lambda : sig ... end
module LambdaArray : sig ... end
module Dropout : sig ... end
module Reshape : sig ... end
module Flatten : sig ... end
module Slice : sig ... end
module Add : sig ... end
module Mul : sig ... end
module Dot : sig ... end
module Max : sig ... end
module Average : sig ... end
module Concatenate : sig ... end
module Normalisation : sig ... end
module GaussianNoise : sig ... end
module GaussianDropout : sig ... end
module AlphaDropout : sig ... end
module Embedding : sig ... end
module Masking : sig ... end
type neuron = Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A))).neuron =
  1. | Input of Input.neuron_typ
  2. | Linear of Linear.neuron_typ
  3. | LinearNoBias of LinearNoBias.neuron_typ
  4. | Embedding of Embedding.neuron_typ
  5. | LSTM of LSTM.neuron_typ
  6. | GRU of GRU.neuron_typ
  7. | Recurrent of Recurrent.neuron_typ
  8. | Conv1D of Conv1D.neuron_typ
  9. | Conv2D of Conv2D.neuron_typ
  10. | Conv3D of Conv3D.neuron_typ
  11. | DilatedConv1D of DilatedConv1D.neuron_typ
  12. | DilatedConv2D of DilatedConv2D.neuron_typ
  13. | DilatedConv3D of DilatedConv3D.neuron_typ
  14. | TransposeConv1D of TransposeConv1D.neuron_typ
  15. | TransposeConv2D of TransposeConv2D.neuron_typ
  16. | TransposeConv3D of TransposeConv3D.neuron_typ
  17. | FullyConnected of FullyConnected.neuron_typ
  18. | MaxPool1D of MaxPool1D.neuron_typ
  19. | MaxPool2D of MaxPool2D.neuron_typ
  20. | AvgPool1D of AvgPool1D.neuron_typ
  21. | AvgPool2D of AvgPool2D.neuron_typ
  22. | GlobalMaxPool1D of GlobalMaxPool1D.neuron_typ
  23. | GlobalMaxPool2D of GlobalMaxPool2D.neuron_typ
  24. | GlobalAvgPool1D of GlobalAvgPool1D.neuron_typ
  25. | GlobalAvgPool2D of GlobalAvgPool2D.neuron_typ
  26. | UpSampling2D of UpSampling2D.neuron_typ
  27. | Padding2D of Padding2D.neuron_typ
  28. | Dropout of Dropout.neuron_typ
  29. | Reshape of Reshape.neuron_typ
  30. | Flatten of Flatten.neuron_typ
  31. | Slice of Slice.neuron_typ
  32. | Lambda of Lambda.neuron_typ
  33. | LambdaArray of LambdaArray.neuron_typ
  34. | Activation of Activation.neuron_typ
  35. | GaussianNoise of GaussianNoise.neuron_typ
  36. | GaussianDropout of GaussianDropout.neuron_typ
  37. | AlphaDropout of AlphaDropout.neuron_typ
  38. | Normalisation of Normalisation.neuron_typ
  39. | Add of Add.neuron_typ
  40. | Mul of Mul.neuron_typ
  41. | Dot of Dot.neuron_typ
  42. | Max of Max.neuron_typ
  43. | Average of Average.neuron_typ
  44. | Concatenate of Concatenate.neuron_typ
val get_in_out_shape : neuron -> int array * int array
val get_in_shape : neuron -> int array
val get_out_shape : neuron -> int array
val connect : int array array -> neuron -> unit
val init : neuron -> unit
val reset : neuron -> unit
val mktag : int -> neuron -> unit
val mkpar : neuron -> Optimise.Algodiff.t array
val mkpri : neuron -> Optimise.Algodiff.t array
val mkadj : neuron -> Optimise.Algodiff.t array
val update : neuron -> Optimise.Algodiff.t array -> unit
val load_weights : neuron -> Optimise.Algodiff.t array -> unit
val save_weights : neuron -> Optimise.Algodiff.t array
val copy : neuron -> neuron
val to_string : neuron -> string
val to_name : neuron -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/argument-1-A/Linalg/index.html b/owl-base/Owl_neural_generic/Make_Embedded/argument-1-A/Linalg/index.html deleted file mode 100644 index 27820ab4a..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/argument-1-A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_neural_generic.Make_Embedded.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/argument-1-A/Mat/index.html b/owl-base/Owl_neural_generic/Make_Embedded/argument-1-A/Mat/index.html deleted file mode 100644 index d0af63bee..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/argument-1-A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_generic.Make_Embedded.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/argument-1-A/Scalar/index.html b/owl-base/Owl_neural_generic/Make_Embedded/argument-1-A/Scalar/index.html deleted file mode 100644 index 9a616012c..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/argument-1-A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_neural_generic.Make_Embedded.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/argument-1-A/index.html b/owl-base/Owl_neural_generic/Make_Embedded/argument-1-A/index.html deleted file mode 100644 index d7510ff15..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/argument-1-A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_neural_generic.Make_Embedded.A)

Parameter Make_Embedded.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
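
The signature above is abstract over arr and elt, so any usage sketch has to be written against a module that satisfies it. The functor below is a minimal, self-contained illustration that relies only on operations listed on this page; the module name Demo and the inline signature are illustrative, not part of owl-base.

module Demo (A : sig
  type arr
  type elt
  val uniform : ?a:elt -> ?b:elt -> int array -> arr
  val get_slice : int list list -> arr -> arr
  val mul_scalar : arr -> elt -> arr
  val sum' : arr -> elt
  val shape : arr -> int array
  val float_to_elt : float -> elt
  val elt_to_float : elt -> float
end) = struct
  (* create a 3x4 array, slice out the first two rows, scale and reduce *)
  let run () =
    let x = A.uniform [| 3; 4 |] in
    let y = A.get_slice [ [ 0; 1 ]; [] ] x in
    let z = A.mul_scalar y (A.float_to_elt 2.) in
    Printf.printf "shape %s, sum %g\n"
      (String.concat "x" (Array.to_list (Array.map string_of_int (A.shape z))))
      (A.elt_to_float (A.sum' z))
end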
\ No newline at end of file diff --git a/owl-base/Owl_neural_generic/Make_Embedded/index.html b/owl-base/Owl_neural_generic/Make_Embedded/index.html deleted file mode 100644 index 2b074853d..000000000 --- a/owl-base/Owl_neural_generic/Make_Embedded/index.html +++ /dev/null @@ -1,247 +0,0 @@ - -Make_Embedded (owl-base.Owl_neural_generic.Make_Embedded)

Module Owl_neural_generic.Make_Embedded

Parameters

Signature

include sig ... end
module Neuron : sig ... end
type node = Owl_neural_graph.Make(Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)))).node = {
  1. mutable name : string;
  2. mutable prev : node array;
  3. mutable next : node array;
  4. mutable neuron : Neuron.neuron;
  5. mutable output : Neuron.Optimise.Algodiff.t option;
  6. mutable network : network;
  7. mutable train : bool;
}
and network = Owl_neural_graph.Make(Owl_neural_neuron.Make(Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)))).network = {
  1. mutable nnid : string;
  2. mutable size : int;
  3. mutable roots : node array;
  4. mutable outputs : node array;
  5. mutable topo : node array;
}
val make_network : ?nnid:string -> int -> node array -> node array -> network
val make_node : ?name:string -> ?train:bool -> node array -> node array -> Neuron.neuron -> Neuron.Optimise.Algodiff.t option -> network -> node
val get_roots : network -> node array
val get_outputs : network -> node array
val get_node : network -> string -> node
val get_network : ?name:string -> node -> network
val outputs : ?name:string -> node array -> network
val get_network_name : network -> string
val set_network_name : network -> string -> unit
val input_shape : network -> int array
val input_shapes : network -> int array array
val collect_output : node array -> Neuron.Optimise.Algodiff.t array
val connect_pair : node -> node -> unit
val connect_to_parents : node array -> node -> unit
val add_node : ?act_typ:Neuron.Activation.typ -> network -> node array -> node -> node
val init : network -> unit
val reset : network -> unit
val mktag : int -> network -> unit
val mkpar : network -> Neuron.Optimise.Algodiff.t array array
val mkpri : network -> Neuron.Optimise.Algodiff.t array array
val mkadj : network -> Neuron.Optimise.Algodiff.t array array
val update : network -> Neuron.Optimise.Algodiff.t array array -> unit
val run_inputs : Neuron.Optimise.Algodiff.t array -> network -> Neuron.Optimise.Algodiff.t array
val forward_inputs : network -> Neuron.Optimise.Algodiff.t array -> Neuron.Optimise.Algodiff.t array * Neuron.Optimise.Algodiff.t array array
val backward : network -> Neuron.Optimise.Algodiff.t -> Neuron.Optimise.Algodiff.t array array * Neuron.Optimise.Algodiff.t array array
val copy : network -> network
val _remove_training_nodes : network -> unit
val model_inputs : network -> Neuron.Optimise.Algodiff.A.arr array -> Neuron.Optimise.Algodiff.A.arr array
val input : ?name:string -> int array -> node
val inputs : ?names:string array -> int array array -> node array
val activation : ?name:string -> Neuron.Activation.typ -> node -> node
val linear : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> node -> node
val linear_nobias : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> node -> node
val embedding : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> int -> node -> node
val recurrent : ?name:string -> ?init_typ:Neuron.Init.typ -> act_typ:Neuron.Activation.typ -> int -> int -> node -> node
val lstm : ?name:string -> ?init_typ:Neuron.Init.typ -> int -> node -> node
val gru : ?name:string -> ?init_typ:Neuron.Init.typ -> int -> node -> node
val conv1d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val conv2d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val conv3d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val dilated_conv1d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> int array -> node -> node
val dilated_conv2d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> int array -> node -> node
val dilated_conv3d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> int array -> node -> node
val transpose_conv1d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val transpose_conv2d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val transpose_conv3d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val fully_connected : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> node -> node
val max_pool1d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val max_pool2d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val avg_pool1d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val avg_pool2d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val global_max_pool1d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val global_max_pool2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val global_avg_pool1d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val global_avg_pool2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val upsampling2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> int array -> node -> node
val padding2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> int array array -> node -> node
val dropout : ?name:string -> float -> node -> node
val gaussian_noise : ?name:string -> float -> node -> node
val gaussian_dropout : ?name:string -> float -> node -> node
val alpha_dropout : ?name:string -> float -> node -> node
val normalisation : ?name:string -> ?axis:int -> ?training:bool -> ?decay:float -> ?mu:Neuron.Optimise.Algodiff.A.arr -> ?var:Neuron.Optimise.Algodiff.A.arr -> node -> node
val reshape : ?name:string -> int array -> node -> node
val flatten : ?name:string -> node -> node
val slice : ?name:string -> int list list -> node -> node
val lambda : ?name:string -> ?act_typ:Neuron.Activation.typ -> ?out_shape:int array -> (Neuron.Optimise.Algodiff.t -> Neuron.Optimise.Algodiff.t) -> node -> node
val lambda_array : ?name:string -> ?act_typ:Neuron.Activation.typ -> int array -> (Neuron.Optimise.Algodiff.t array -> Neuron.Optimise.Algodiff.t) -> node array -> node
val add : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val mul : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val dot : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val max : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val average : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val concatenate : ?name:string -> ?act_typ:Neuron.Activation.typ -> int -> node array -> node
val to_string : network -> string
val pp_network : Stdlib.Format.formatter -> network -> unit
val print : network -> unit
val save : ?unsafe:bool -> network -> string -> unit
val load : string -> network
val save_weights : network -> string -> unit
val load_weights : network -> string -> unit
val make_subnetwork : ?copy:bool -> ?make_inputs:string array -> network -> string array -> network
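
A minimal sketch of how these graph constructors compose, assuming G is an instantiation of this functor (for example module G = Owl_neural_generic.Make_Embedded (A) for a suitable A); the layer sizes are illustrative.

open G

(* a 784 -> 128 -> 10 multilayer perceptron built with the combinators above *)
let nn =
  input [| 784 |]
  |> linear 128 ~act_typ:Neuron.Activation.Relu
  |> linear 10 ~act_typ:(Neuron.Activation.Softmax 1)
  |> get_network ~name:"mlp"

(* print the layer-by-layer summary of the network *)
let () = print nn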
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/.dummy b/owl-base/Owl_neural_graph/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Activation/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Activation/index.html deleted file mode 100644 index a79a8d931..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Activation/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Activation (owl-base.Owl_neural_graph.Make.Neuron.Activation)

Module Neuron.Activation

type typ =
  1. | Elu
  2. | Relu
  3. | Sigmoid
  4. | HardSigmoid
  5. | Softmax of int
  6. | Softplus
  7. | Softsign
  8. | Tanh
  9. | Relu6
  10. | LeakyRelu of float
  11. | TRelu of float
  12. | Custom of (Optimise.Algodiff.t -> Optimise.Algodiff.t)
  13. | None
    (* Types of activation functions. *)
type neuron_typ = {
  1. mutable activation : typ;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val run_activation : Optimise.Algodiff.t -> typ -> Optimise.Algodiff.t

Run one specific activation function.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val activation_to_string : typ -> string

Return the name of a specific activation function.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
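
A minimal sketch of this module's entry points, assuming a Neuron module with the shape documented here is in scope; the 0.1 slope is an illustrative value.

let describe_activation () =
  let open Neuron.Activation in
  let l = create (LeakyRelu 0.1) in         (* build an activation neuron *)
  print_endline (to_string l);              (* summary of its configuration *)
  print_endline (activation_to_string Relu) (* name of a plain activation variant *)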

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Add/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Add/index.html deleted file mode 100644 index 9ff50802d..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Add/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Add (owl-base.Owl_neural_graph.Make.Neuron.Add)

Module Neuron.Add

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
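
A minimal sketch of the multi-input run interface, assuming a Neuron module with this shape is in scope and that x and y are existing Neuron.Optimise.Algodiff.t values (assumed bindings).

let add_outputs x y =
  let n = Neuron.Add.create () in   (* the Add neuron carries no parameters *)
  Neuron.Add.run [| x; y |] n       (* element-wise sum of the two inputs *)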

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/AlphaDropout/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/AlphaDropout/index.html deleted file mode 100644 index d90dcb7d8..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/AlphaDropout/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -AlphaDropout (owl-base.Owl_neural_graph.Make.Neuron.AlphaDropout)

Module Neuron.AlphaDropout

type neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : float -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Average/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Average/index.html deleted file mode 100644 index c8afe80da..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Average/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Average (owl-base.Owl_neural_graph.Make.Neuron.Average)

Module Neuron.Average

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/AvgPool1D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/AvgPool1D/index.html deleted file mode 100644 index 4603fafe6..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/AvgPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -AvgPool1D (owl-base.Owl_neural_graph.Make.Neuron.AvgPool1D)

Module Neuron.AvgPool1D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : Owl_types.padding -> int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/AvgPool2D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/AvgPool2D/index.html deleted file mode 100644 index 443397b03..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/AvgPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -AvgPool2D (owl-base.Owl_neural_graph.Make.Neuron.AvgPool2D)

Module Neuron.AvgPool2D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : Owl_types.padding -> int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Concatenate/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Concatenate/index.html deleted file mode 100644 index 624a04374..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Concatenate/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Concatenate (owl-base.Owl_neural_graph.Make.Neuron.Concatenate)

Module Neuron.Concatenate

type neuron_typ = {
  1. mutable axis : int;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : int -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Conv1D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Conv1D/index.html deleted file mode 100644 index 296ba9275..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Conv1D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Conv1D (owl-base.Owl_neural_graph.Make.Neuron.Conv1D)

Module Neuron.Conv1D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
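
A minimal sketch of the create/connect/init life cycle documented above, assuming this Neuron module is in scope and that Owl_types.SAME is the padding constructor; the kernel layout [|width; in_channels; out_channels|] and the 100-step, 3-channel input shape are illustrative.

let conv =
  (* SAME padding, 5-wide kernel over 3 input channels producing 16 channels *)
  Neuron.Conv1D.create Owl_types.SAME [| 5; 3; 16 |] [| 1 |] Neuron.Init.GlorotUniform

let () =
  Neuron.Conv1D.connect [| 100; 3 |] conv;   (* propagate the incoming shape *)
  Neuron.Conv1D.init conv;                   (* allocate and initialise w and b *)
  print_endline (Neuron.Conv1D.to_string conv)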

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Conv2D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Conv2D/index.html deleted file mode 100644 index e67e2052c..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Conv2D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Conv2D (owl-base.Owl_neural_graph.Make.Neuron.Conv2D)

Module Neuron.Conv2D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Conv3D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Conv3D/index.html deleted file mode 100644 index a8816cc51..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Conv3D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Conv3D (owl-base.Owl_neural_graph.Make.Neuron.Conv3D)

Module Neuron.Conv3D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/DilatedConv1D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/DilatedConv1D/index.html deleted file mode 100644 index a5b6611cd..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/DilatedConv1D/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -DilatedConv1D (owl-base.Owl_neural_graph.Make.Neuron.DilatedConv1D)

Module Neuron.DilatedConv1D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/DilatedConv2D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/DilatedConv2D/index.html deleted file mode 100644 index c49701dbf..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/DilatedConv2D/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -DilatedConv2D (owl-base.Owl_neural_graph.Make.Neuron.DilatedConv2D)

Module Neuron.DilatedConv2D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/DilatedConv3D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/DilatedConv3D/index.html deleted file mode 100644 index 770e3278f..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/DilatedConv3D/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -DilatedConv3D (owl-base.Owl_neural_graph.Make.Neuron.DilatedConv3D)

Module Neuron.DilatedConv3D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Dot/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Dot/index.html deleted file mode 100644 index 801a2f012..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Dot/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Dot (owl-base.Owl_neural_graph.Make.Neuron.Dot)

Module Neuron.Dot

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Dropout/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Dropout/index.html deleted file mode 100644 index f0d2ebeb3..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Dropout/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Dropout (owl-base.Owl_neural_graph.Make.Neuron.Dropout)

Module Neuron.Dropout

type neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : float -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Embedding/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Embedding/index.html deleted file mode 100644 index d28a469cd..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Embedding/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Embedding (owl-base.Owl_neural_graph.Make.Neuron.Embedding)

Module Neuron.Embedding

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_dim : int;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int -> int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Flatten/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Flatten/index.html deleted file mode 100644 index 698c2270d..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Flatten/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Flatten (owl-base.Owl_neural_graph.Make.Neuron.Flatten)

Module Neuron.Flatten

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/FullyConnected/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/FullyConnected/index.html deleted file mode 100644 index 8b33d6060..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/FullyConnected/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -FullyConnected (owl-base.Owl_neural_graph.Make.Neuron.FullyConnected)

Module Neuron.FullyConnected

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
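
A minimal sketch of how an optimiser can drive a neuron through mkpar, mkadj and update, assuming this Neuron module is in scope; lr stands for a learning rate already lifted into Algodiff, and the Maths operators come from the Algodiff module rather than from the signature shown here (both assumptions).

let sgd_step fc lr =
  let open Neuron in
  let ws = FullyConnected.mkpar fc in    (* current parameter values *)
  let gs = FullyConnected.mkadj fc in    (* their adjoints after a backward pass *)
  let ws' =
    Array.map2 (fun w g -> Optimise.Algodiff.Maths.(w - (lr * g))) ws gs
  in
  FullyConnected.update fc ws'           (* write the new parameters back *)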

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GRU/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GRU/index.html deleted file mode 100644 index c80e1a7e2..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GRU/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GRU (owl-base.Owl_neural_graph.Make.Neuron.GRU)

Module Neuron.GRU

type neuron_typ = {
  1. mutable wxz : Optimise.Algodiff.t;
  2. mutable whz : Optimise.Algodiff.t;
  3. mutable wxr : Optimise.Algodiff.t;
  4. mutable whr : Optimise.Algodiff.t;
  5. mutable wxh : Optimise.Algodiff.t;
  6. mutable whh : Optimise.Algodiff.t;
  7. mutable bz : Optimise.Algodiff.t;
  8. mutable br : Optimise.Algodiff.t;
  9. mutable bh : Optimise.Algodiff.t;
  10. mutable h : Optimise.Algodiff.t;
  11. mutable init_typ : Init.typ;
  12. mutable in_shape : int array;
  13. mutable out_shape : int array;
}

Neuron type definition.

val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GaussianDropout/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GaussianDropout/index.html deleted file mode 100644 index 3885111fb..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GaussianDropout/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GaussianDropout (owl-base.Owl_neural_graph.Make.Neuron.GaussianDropout)

Module Neuron.GaussianDropout

type neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : float -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GaussianNoise/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GaussianNoise/index.html deleted file mode 100644 index e67c79268..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GaussianNoise/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GaussianNoise (owl-base.Owl_neural_graph.Make.Neuron.GaussianNoise)

Module Neuron.GaussianNoise

type neuron_typ = {
  1. mutable sigma : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : float -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GlobalAvgPool1D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GlobalAvgPool1D/index.html deleted file mode 100644 index e69e1fa12..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GlobalAvgPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalAvgPool1D (owl-base.Owl_neural_graph.Make.Neuron.GlobalAvgPool1D)

Module Neuron.GlobalAvgPool1D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GlobalAvgPool2D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GlobalAvgPool2D/index.html deleted file mode 100644 index d0961b67d..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GlobalAvgPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalAvgPool2D (owl-base.Owl_neural_graph.Make.Neuron.GlobalAvgPool2D)

Module Neuron.GlobalAvgPool2D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GlobalMaxPool1D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GlobalMaxPool1D/index.html deleted file mode 100644 index e0f193a1f..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GlobalMaxPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalMaxPool1D (owl-base.Owl_neural_graph.Make.Neuron.GlobalMaxPool1D)

Module Neuron.GlobalMaxPool1D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GlobalMaxPool2D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GlobalMaxPool2D/index.html deleted file mode 100644 index fd87b5d04..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/GlobalMaxPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalMaxPool2D (owl-base.Owl_neural_graph.Make.Neuron.GlobalMaxPool2D)

Module Neuron.GlobalMaxPool2D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Init/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Init/index.html deleted file mode 100644 index 0cf981481..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Init/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Init (owl-base.Owl_neural_graph.Make.Neuron.Init)

Module Neuron.Init

type typ =
  1. | Uniform of float * float
  2. | Gaussian of float * float
  3. | Standard
  4. | Tanh
  5. | GlorotNormal
  6. | GlorotUniform
  7. | LecunNormal
  8. | HeNormal
  9. | Custom of (int array -> Optimise.Algodiff.t)
    (* Initialisation types *)
val calc_fans : int array -> float * float

Calculate fan-in and fan-out of weights.

val run : typ -> int array -> Optimise.Algodiff.t -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
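
A minimal sketch of calc_fans and to_string, assuming this Neuron module is in scope; the 3x3x8x16 kernel shape is illustrative.

let describe_init () =
  let fan_in, fan_out = Neuron.Init.calc_fans [| 3; 3; 8; 16 |] in
  Printf.printf "fan_in = %g, fan_out = %g, init = %s\n"
    fan_in fan_out (Neuron.Init.to_string Neuron.Init.GlorotUniform)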

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Input/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Input/index.html deleted file mode 100644 index a216c41f4..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Input/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Input (owl-base.Owl_neural_graph.Make.Neuron.Input)

Module Neuron.Input

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : int array -> neuron_typ

Create the neuron.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/LSTM/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/LSTM/index.html deleted file mode 100644 index c1dac4790..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/LSTM/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -LSTM (owl-base.Owl_neural_graph.Make.Neuron.LSTM)

Module Neuron.LSTM

type neuron_typ = {
  1. mutable wxi : Optimise.Algodiff.t;
  2. mutable whi : Optimise.Algodiff.t;
  3. mutable wxc : Optimise.Algodiff.t;
  4. mutable whc : Optimise.Algodiff.t;
  5. mutable wxf : Optimise.Algodiff.t;
  6. mutable whf : Optimise.Algodiff.t;
  7. mutable wxo : Optimise.Algodiff.t;
  8. mutable who : Optimise.Algodiff.t;
  9. mutable bi : Optimise.Algodiff.t;
  10. mutable bc : Optimise.Algodiff.t;
  11. mutable bf : Optimise.Algodiff.t;
  12. mutable bo : Optimise.Algodiff.t;
  13. mutable c : Optimise.Algodiff.t;
  14. mutable h : Optimise.Algodiff.t;
  15. mutable init_typ : Init.typ;
  16. mutable in_shape : int array;
  17. mutable out_shape : int array;
}

Neuron type definition.

val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Lambda/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Lambda/index.html deleted file mode 100644 index 87bcfa3b7..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Lambda/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Lambda (owl-base.Owl_neural_graph.Make.Neuron.Lambda)

Module Neuron.Lambda

type neuron_typ = {
  1. mutable lambda : Optimise.Algodiff.t -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : ?out_shape:int array -> (Optimise.Algodiff.t -> Optimise.Algodiff.t) -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
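
A minimal sketch of a custom Lambda neuron, assuming this Neuron module is in scope; Maths.sqr comes from the Algodiff module and is an assumption, since it is not part of the signature listed here.

let square_layer =
  Neuron.Lambda.create (fun x -> Neuron.Optimise.Algodiff.Maths.sqr x)

let () =
  Neuron.Lambda.connect [| 32 |] square_layer;   (* shape passes through unchanged *)
  print_endline (Neuron.Lambda.to_string square_layer)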

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/LambdaArray/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/LambdaArray/index.html deleted file mode 100644 index 392f88d52..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/LambdaArray/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -LambdaArray (owl-base.Owl_neural_graph.Make.Neuron.LambdaArray)

Module Neuron.LambdaArray

type neuron_typ = {
  1. mutable lambda : Optimise.Algodiff.t array -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : int array -> (Optimise.Algodiff.t array -> Optimise.Algodiff.t) -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Linear/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Linear/index.html deleted file mode 100644 index 39cbd260d..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Linear/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Linear (owl-base.Owl_neural_graph.Make.Neuron.Linear)

Module Neuron.Linear

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.
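An illustrative sketch only, assuming Init.Standard is one of the available initialisation variants:

  (* hypothetical fully connected layer: 64 inputs, 32 outputs *)
  let fc = create ~inputs:64 32 Init.Standard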

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/LinearNoBias/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/LinearNoBias/index.html deleted file mode 100644 index 3ccce410f..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/LinearNoBias/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -LinearNoBias (owl-base.Owl_neural_graph.Make.Neuron.LinearNoBias)

Module Neuron.LinearNoBias

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_shape : int array;
  4. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Masking/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Masking/index.html deleted file mode 100644 index d29f6d891..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Masking/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Masking (owl-base.Owl_neural_graph.Make.Neuron.Masking)

Module Neuron.Masking

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Max/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Max/index.html deleted file mode 100644 index 3d563ce31..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Max/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Max (owl-base.Owl_neural_graph.Make.Neuron.Max)

Module Neuron.Max

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/MaxPool1D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/MaxPool1D/index.html deleted file mode 100644 index b116ea3db..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/MaxPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -MaxPool1D (owl-base.Owl_neural_graph.Make.Neuron.MaxPool1D)

Module Neuron.MaxPool1D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : Owl_types.padding -> int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/MaxPool2D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/MaxPool2D/index.html deleted file mode 100644 index 8a2e71a39..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/MaxPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -MaxPool2D (owl-base.Owl_neural_graph.Make.Neuron.MaxPool2D)

Module Neuron.MaxPool2D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : Owl_types.padding -> int array -> int array -> neuron_typ

Create the neuron.
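An illustrative sketch only, assuming Owl_types.SAME is one of the padding variants:

  (* hypothetical 2x2 max pooling with stride 2 *)
  let p = create Owl_types.SAME [|2; 2|] [|2; 2|]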

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Mul/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Mul/index.html deleted file mode 100644 index cc5760cb7..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Mul/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mul (owl-base.Owl_neural_graph.Make.Neuron.Mul)

Module Neuron.Mul

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Normalisation/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Normalisation/index.html deleted file mode 100644 index ce9b8f935..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Normalisation/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Normalisation (owl-base.Owl_neural_graph.Make.Neuron.Normalisation)

Module Neuron.Normalisation

type neuron_typ = {
  1. mutable axis : int;
  2. mutable beta : Optimise.Algodiff.t;
  3. mutable gamma : Optimise.Algodiff.t;
  4. mutable mu : Optimise.Algodiff.t;
  5. mutable var : Optimise.Algodiff.t;
  6. mutable decay : Optimise.Algodiff.t;
  7. mutable training : bool;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}

Neuron type definition.

val create : ?training:bool -> ?decay:float -> ?mu:Optimise.Algodiff.A.arr -> ?var:Optimise.Algodiff.A.arr -> int -> neuron_typ

Create the neuron. Note that axis 0 is the batch axis.
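An illustrative sketch only; the axis argument (1, i.e. the first non-batch axis) and the decay value are arbitrary assumptions:

  (* hypothetical batch-normalisation neuron with a running-average decay of 0.99 *)
  let bn = create ~training:true ~decay:0.99 1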

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the trainable parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update trainable parameters of the neuron, used by Optimise module.

val load_weights : neuron_typ -> Optimise.Algodiff.t array -> unit

Load both trainable and non-trainable parameters into the neuron.

val save_weights : neuron_typ -> Optimise.Algodiff.t array

Assemble both trainable and non-trainable parameters of the neuron.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/A/Linalg/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/A/Linalg/index.html deleted file mode 100644 index 117d3e3ae..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/A/Mat/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/A/Mat/index.html deleted file mode 100644 index 3bb88d20f..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/A/Scalar/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/A/Scalar/index.html deleted file mode 100644 index b47a4dd87..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/A/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/A/index.html deleted file mode 100644 index 52b9c0080..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Algodiff.A)

Module Algodiff.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Arr/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Arr/index.html deleted file mode 100644 index 91b8ccee5..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/index.html deleted file mode 100644 index df686d1f4..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Algodiff.Builder)

Module Algodiff.Builder

Ops Builder
module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t

build single input single output operations
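As a hedged sketch, not the library's own code, of how build_siso can be combined with the Siso signature documented below: a hypothetical elementwise square operation. It assumes the surrounding Algodiff module is open (so F, Arr, Maths, pack_flt and Builder are in scope), and the exact role of each df/dr argument is an assumption noted in the comments.

  (* hypothetical custom op y = x * x registered through the builder *)
  let my_sqr =
    Builder.build_siso
      (module struct
        let label = "my_sqr"
        let ff_f x = F A.Scalar.(mul x x)                  (* scalar forward pass *)
        let ff_arr x = Arr A.(mul x x)                     (* ndarray forward pass *)
        let df _cp ap at = Maths.(pack_flt 2. * ap * at)   (* tangent: 2 * x * dx *)
        let dr a _cp ca = Maths.(pack_flt 2. * a * !ca)    (* adjoint: 2 * x * ybar *)
      end : Builder.Siso)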

module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t

build single input pair outputs operations

module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t

build single input triple outputs operations

module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array

build single input array output operations

module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t

build pair inputs single output operations

module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t

build array input single output operations

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html deleted file mode 100644 index f722fbe11..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html deleted file mode 100644 index 3903456e9..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html deleted file mode 100644 index f480b7351..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html deleted file mode 100644 index 8df05ab54..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html deleted file mode 100644 index 3f2a6aa72..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html deleted file mode 100644 index 8089a13ae..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Linalg/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Linalg/index.html deleted file mode 100644 index c653eb076..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val logdet : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val chol : ?upper:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val qr : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val lq : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val svd : ?thin:bool -> t -> t * t * t

Refer to :doc:`owl_dense_ndarray_generic`

val sylvester : t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val lyapunov : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val care : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dare : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Mat/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Mat/index.html deleted file mode 100644 index 81ff51996..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Maths/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Maths/index.html deleted file mode 100644 index 208adef2c..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (-) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (**) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val add : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sub : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mul : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val div : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val kron : t -> t -> t

Refer to :doc:`owl_dense_matrix_generic`

val dot : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pow : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val min2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cross_entropy : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val neg : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val abs : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val signum : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val floor : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val ceil : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val round : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqr : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqrt : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log2 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log10 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val exp : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum_reduce : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mean : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val swap : int -> int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l1norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm_sqr' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sigmoid : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val relu : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dawsn : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softplus : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softsign : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softmax : ?axis:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val reshape : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val flatten : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_item : t -> int -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_row : t -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val concat : axis:int -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val split : axis:int -> int array -> t -> t array

Refer to :doc:`owl_dense_ndarray_generic`

val of_arrays : t array array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val to_arrays : t -> t array array

Refer to :doc:`owl_dense_ndarray_generic`

val concatenate : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val stack : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_slice : int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_slice : int list list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_fancy : Owl_types.index list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_fancy : Owl_types.index list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diag : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diagm : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val trace : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val triu : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tril : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/NN/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/NN/index.html deleted file mode 100644 index 1e52fdd22..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val upsampling2d : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pad : ?v:A.elt -> int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/index.html deleted file mode 100644 index 31a04b8b0..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Algodiff/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Algodiff (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Algodiff)

Module Optimise.Algodiff

include Owl_algodiff_core_sig.Sig
Type definition
include Owl_algodiff_types_sig.Sig with type elt := A.elt and type arr := A.arr
type t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
Core functions
val tag : unit -> int

TODO

val primal : t -> t

TODO

val primal' : t -> t

TODO

val zero : t -> t

TODO

val reset_zero : t -> t

TODO

val tangent : t -> t

TODO

val adjref : t -> t Stdlib.ref

TODO

val adjval : t -> t

TODO

val shape : t -> int array

TODO

val is_float : t -> bool

TODO

val is_arr : t -> bool

TODO

val row_num : t -> int

number of rows

val col_num : t -> int

number of columns

val numel : t -> int

number of elements

val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t

other functions, without tracking gradient

val clip_by_l2norm : A.elt -> t -> t

other functions, without tracking gradient

val copy_primal' : t -> t

TODO

val tile : t -> int array -> t

TODO

val repeat : t -> int array -> t

TODO

val pack_elt : A.elt -> t

convert from elt type to t type.

val unpack_elt : t -> A.elt

convert from t type to elt type.

val pack_flt : float -> t

convert from float type to t type.

val _f : float -> t

A shortcut function for F A.(float_to_elt x).

val unpack_flt : t -> float

convert from t type to float type.

val pack_arr : A.arr -> t

convert from arr type to t type.

val unpack_arr : t -> A.arr

convert from t type to arr type.

val deep_info : t -> string

TODO

val type_info : t -> string

TODO

val error_binop : string -> t -> t -> 'a

TODO

val error_uniop : string -> t -> 'a

TODO

val make_forward : t -> t -> int -> t

TODO

val make_reverse : t -> int -> t

TODO

val reverse_prop : t -> t -> unit

TODO

val diff : (t -> t) -> t -> t

diff f x returns the exact derivative of a function f : scalar -> scalar at point x. Simply calling diff f will return its derivative function g of the same type, i.e. g : scalar -> scalar.

Calling this function repeatedly gives you higher-order derivatives of f, i.e. f |> diff |> diff |> diff |> ...
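A minimal, hedged sketch of first- and second-order use, assuming this Algodiff module is in scope:

  (* derivative and second derivative of tanh at x = 0.1 *)
  let x   = pack_flt 0.1
  let dy  = diff Maths.tanh x |> unpack_flt
  let ddy = (diff (diff Maths.tanh)) x |> unpack_flt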

val diff' : (t -> t) -> t -> t * t

similar to diff, but return (f x, diff f x).

val grad : (t -> t) -> t -> t

gradient of f : (vector -> scalar) at x, reverse ad.

val grad' : (t -> t) -> t -> t * t

similar to grad, but return (f x, grad f x).

val jacobian : (t -> t) -> t -> t

jacobian of f : (vector -> vector) at x, both x and y are row vectors.

val jacobian' : (t -> t) -> t -> t * t

similar to jacobian, but return (f x, jacobian f x)

val jacobianv : (t -> t) -> t -> t -> t

jacobian vector product of f : (vector -> vector) at x along v, forward ad. Namely, it calculates (jacobian f x) v.

val jacobianv' : (t -> t) -> t -> t -> t * t

similar to jacobianv, but return (f x, jacobianv f x v).

val jacobianTv : (t -> t) -> t -> t -> t

transposed jacobian vector product of f : (vector -> vector) at x along v, backward ad. Namely, it calculates (transpose (jacobian f x)) v.

val jacobianTv' : (t -> t) -> t -> t -> t * t

similar to jacobianTv, but return (f x, jacobianTv f x v).

val hessian : (t -> t) -> t -> t

hessian of f : (scalar -> scalar) at x.

val hessian' : (t -> t) -> t -> t * t

similar to hessian, but return (f x, hessian f x).

val hessianv : (t -> t) -> t -> t -> t

hessian vector product of f : (scalar -> scalar) at x along v. Namely, it calculates (hessian f x) v.

val hessianv' : (t -> t) -> t -> t -> t * t

similar to hessianv, but return (f x, hessianv f x v).

val laplacian : (t -> t) -> t -> t

laplacian of f : (scalar -> scalar) at x.

val laplacian' : (t -> t) -> t -> t * t

similar to laplacian, but return (f x, laplacian f x).

val gradhessian : (t -> t) -> t -> t * t

return (grad f x, hessian f x), f : (scalar -> scalar)

val gradhessian' : (t -> t) -> t -> t * t * t

return (f x, grad f x, hessian f x)

val gradhessianv : (t -> t) -> t -> t -> t * t

return (grad f x v, hessian f x v)

val gradhessianv' : (t -> t) -> t -> t -> t * t * t

return (f x, grad f x v, hessian f x v)

include Owl_algodiff_ops_sig.Sig with type t := t and type elt := A.elt and type arr := A.arr and type op := op
module Builder : Owl_algodiff_ops_builder_sig.Sig with type t := t and type elt := A.elt and type arr := A.arr and type op := op
Supported Maths functions
module Maths : sig ... end
Supported Linalg functions
module Linalg : sig ... end
Supported Neural Network functions
module NN : sig ... end
Supported Mat functions
module Mat : sig ... end
Supported Arr functions
module Arr : sig ... end
Helper functions
include Owl_algodiff_graph_convert_sig.Sig with type t := t
val to_trace : t list -> string

to_trace [t0; t1; ...] outputs the trace of the computation graph on the terminal in a human-readable format.

val to_dot : t list -> string

to_dot [t0; t1; ...] outputs the trace of the computation graph in the dot file format, which you can pass to other tools for further visualisation, such as Graphviz.
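A hedged sketch of dumping a small traced graph to dot format; writing the string to a file and rendering it with Graphviz are left out, and the use of make_reverse/tag to create a traced node is only illustrative:

  (* trace y = sum' (sin x) for a random 3x3 input and export it *)
  let x = make_reverse (Arr.uniform [|3; 3|]) (tag ())
  let y = Maths.(sum' (sin x))
  let dot_string = to_dot [ y ]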

val pp_num : Stdlib.Format.formatter -> t -> unit

pp_num t pretty prints the abstract number used in Algodiff.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Batch/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Batch/index.html deleted file mode 100644 index c5f68a2a6..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Batch/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Batch (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Batch)

Module Optimise.Batch

Batch module

type typ =
  1. | Full
  2. | Mini of int
  3. | Sample of int
  4. | Stochastic

Types of batches.

val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t

Execute the computations defined in module typ.

val batches : typ -> Algodiff.t -> int

Return the total number of batches given a batch typ.
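A small hedged sketch of constructing a batch strategy and querying the batch count, assuming x is the training data as an Algodiff.t and this Optimise module is in scope:

  (* mini-batches of 128 samples *)
  let b = Batch.Mini 128
  let n = Batch.batches b x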

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Checkpoint/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Checkpoint/index.html deleted file mode 100644 index 2d775aaf8..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Checkpoint/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Checkpoint (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Checkpoint)

Module Optimise.Checkpoint

Checkpoint module

type state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}

Type definition of checkpoint

type typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of state -> unit
  4. | None

Batch type.

val init_state : int -> float -> state

init_state batches_per_epoch epochs initialises a state by specifying the number of batches per epoch and the number of epochs in total.

val default_checkpoint_fun : (string -> 'a) -> 'a

This function is used for saving intermediate files during optimisation.

val print_state_info : state -> unit

Print out the detailed information of the current state.

val print_summary : state -> unit

Print out the summary of current state.

val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Clipping/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Clipping/index.html deleted file mode 100644 index a320fcb9a..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Clipping/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Clipping (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Clipping)

Module Optimise.Clipping

Clipping module

type typ =
  1. | L2norm of float
  2. | Value of float * float
  3. | None

Types of clipping functions.

val run : typ -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Gradient/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Gradient/index.html deleted file mode 100644 index c78a61d2c..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Gradient/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -Gradient (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Gradient)

Module Optimise.Gradient

Gradient module

type typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton

Types of gradient function.

val run : typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Learning_Rate/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Learning_Rate/index.html deleted file mode 100644 index d57d92786..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Learning_Rate/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Learning_Rate (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Learning_Rate)

Module Optimise.Learning_Rate

Strategies for learning rate update

type typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array

Representation of learning rate update strategies. Possible values include:

  • Adam (alpha, beta1, beta2); see the referenced paper for the meaning of each parameter. A construction sketch is given below.
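The constants in this sketch are commonly used Adam defaults, not prescriptions:

  (* Adam with alpha = 0.001, beta1 = 0.9, beta2 = 0.999 *)
  let lr = Learning_Rate.Adam (0.001, 0.9, 0.999)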
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array

Update the cache of gradients.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Loss/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Loss/index.html deleted file mode 100644 index 98ad780fc..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Loss/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Loss (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Loss)

Module Optimise.Loss

Loss module

type typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of Algodiff.t -> Algodiff.t -> Algodiff.t

Types of loss functions.
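A hedged sketch of a custom loss built from the Algodiff Maths functions documented earlier (a mean-absolute-error style objective; the choice of l1norm' and the argument order are arbitrary examples):

  (* custom L1 loss between target y and prediction y' *)
  let mae = Loss.Custom (fun y y' -> Algodiff.Maths.(l1norm' (y - y')))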

val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Momentum/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Momentum/index.html deleted file mode 100644 index dfe97ac85..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Momentum/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Momentum (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Momentum)

Module Optimise.Momentum

Momentum module

type typ =
  1. | Standard of float
  2. | Nesterov of float
  3. | None

Types of momentum functions.

val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Params/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Params/index.html deleted file mode 100644 index 731ef9162..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Params/index.html +++ /dev/null @@ -1,14 +0,0 @@ - -Params (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Params)

Module Optimise.Params

Params module

type typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}

Type definition of parameter.

val default : unit -> typ

Create module typ with default values.

val config : ?batch:Batch.typ -> ?gradient:Gradient.typ -> ?loss:Loss.typ -> ?learning_rate:Learning_Rate.typ -> ?regularisation:Regularisation.typ -> ?momentum:Momentum.typ -> ?clipping:Clipping.typ -> ?stopping:Stopping.typ -> ?checkpoint:Checkpoint.typ -> ?verbosity:bool -> float -> typ

This function creates a parameter object with many configurations.
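A hedged sketch of a typical configuration; every value below is an arbitrary choice and the trailing float is the number of epochs:

  let params =
    Params.config
      ~batch:(Batch.Mini 128)
      ~loss:Loss.Cross_entropy
      ~learning_rate:(Learning_Rate.Adam (0.001, 0.9, 0.999))
      ~stopping:(Stopping.Const 1e-6)
      10.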

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Regularisation/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Regularisation/index.html deleted file mode 100644 index 890eabfc6..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Regularisation/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Regularisation (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Regularisation)

Module Optimise.Regularisation

Regularisation module

type typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None

Types of regularisation functions.

val run : typ -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Stopping/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Stopping/index.html deleted file mode 100644 index 6e34552df..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Stopping/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Stopping (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Stopping)

Module Optimise.Stopping

Stopping module

type typ =
  1. | Const of float
  2. | Early of int * int
  3. | None

Types of stopping functions.

val run : typ -> float -> bool

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Utils/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Utils/index.html deleted file mode 100644 index 28688414a..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl-base.Owl_neural_graph.Make.Neuron.Optimise.Utils)

Module Optimise.Utils

Utils module

val sample_num : Algodiff.t -> int

Return the total number of samples in the passed-in ndarray.

val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t

draw_samples x y draws samples from both x (observations) and y (labels). The samples are drawn along axis 0, so x and y must agree along axis 0.

val get_chunk : - Algodiff.t -> - Algodiff.t -> - int -> - int -> - Algodiff.t * Algodiff.t

get_chunk x y i c gets a contiguous chunk of c samples starting at position i from x (observations) and y (labels).
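Putting the three functions together (a sketch; x and y are assumed to be existing Algodiff ndarrays that agree along axis 0):

  let n = Utils.sample_num x in               (* total number of samples *)
  let xb, yb = Utils.draw_samples x y 64 in   (* a random mini-batch of 64 samples *)
  let xc, yc = Utils.get_chunk x y 0 256 in   (* 256 consecutive samples from position 0 *)
  Printf.printf "total samples: %d\n" n;
  ignore (xb, yb, xc, yc)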

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/index.html deleted file mode 100644 index c9ab7aead..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Optimise/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -Optimise (owl-base.Owl_neural_graph.Make.Neuron.Optimise)

Module Neuron.Optimise

module Utils : sig ... end

Utils module

module Learning_Rate : sig ... end

Strategies for learning rate update

module Batch : sig ... end

Batch module

module Loss : sig ... end

Loss module

module Gradient : sig ... end

Gradient module

module Momentum : sig ... end

Momentum module

module Regularisation : sig ... end

Regularisation module

module Clipping : sig ... end

Clipping module

module Stopping : sig ... end

Stopping module

module Checkpoint : sig ... end

Checkpoint module

module Params : sig ... end

Params module

Core functions
val minimise_weight : - ?state:Checkpoint.state -> - Params.typ -> - (Algodiff.t -> Algodiff.t -> Algodiff.t) -> - Algodiff.t -> - Algodiff.t -> - Algodiff.t -> - Checkpoint.state * Algodiff.t

This function minimises the weight w of the passed-in function f.

* f is a function f : w -> x -> y.
* w is a row vector but y can have any shape.
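A minimal sketch of a call (hypothetical values: params is a Params.typ, x the observations, y the labels, w0 an initial weight row vector; *@ denotes Algodiff matrix multiplication):

  (* A simple linear model: predictions are the observations times the transposed weight row vector. *)
  let f w x = Algodiff.Maths.(x *@ transpose w) in
  let _state, w' = minimise_weight params f w0 x y in
  ignore w'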

val minimise_network : - ?state:Checkpoint.state -> - Params.typ -> - (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> - (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> - (Algodiff.t array array -> unit) -> - (string -> unit) -> - Algodiff.t -> - Algodiff.t -> - Checkpoint.state

This function is specifically designed for minimising the weights in a graph-structured neural network. In Owl's earlier versions, the functions in the regression module were implemented on top of this function.

val minimise_fun : - ?state:Checkpoint.state -> - Params.typ -> - (Algodiff.t -> Algodiff.t) -> - Algodiff.t -> - Checkpoint.state * Algodiff.t

This function minimises f : x -> y w.r.t x.

x is an ndarray, and y is a scalar value.
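For instance, minimising a simple scalar-valued function of an ndarray (a sketch with hypothetical params and starting point x0):

  (* f maps an ndarray to the scalar sum of its squared entries. *)
  let f x = Algodiff.Maths.(sum' (x * x)) in
  let _state, x_min = minimise_fun params f x0 in
  ignore x_min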

val minimise_compiled_network : - ?state:Checkpoint.state -> - Params.typ -> - (Algodiff.t -> Algodiff.t -> Algodiff.t) -> - (unit -> unit) -> - (string -> unit) -> - Algodiff.t -> - Algodiff.t -> - Checkpoint.state

TODO

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Padding1D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Padding1D/index.html deleted file mode 100644 index cd58b3280..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Padding1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding1D (owl-base.Owl_neural_graph.Make.Neuron.Padding1D)

Module Neuron.Padding1D

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Padding2D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Padding2D/index.html deleted file mode 100644 index 17bc7479a..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Padding2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding2D (owl-base.Owl_neural_graph.Make.Neuron.Padding2D)

Module Neuron.Padding2D

type neuron_typ = {
  1. mutable padding : int array array;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : int array array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Padding3D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Padding3D/index.html deleted file mode 100644 index c07460aed..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Padding3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding3D (owl-base.Owl_neural_graph.Make.Neuron.Padding3D)

Module Neuron.Padding3D

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Recurrent/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Recurrent/index.html deleted file mode 100644 index 96055d474..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Recurrent/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -Recurrent (owl-base.Owl_neural_graph.Make.Neuron.Recurrent)

Module Neuron.Recurrent

type neuron_typ = {
  1. mutable whh : Optimise.Algodiff.t;
  2. mutable wxh : Optimise.Algodiff.t;
  3. mutable why : Optimise.Algodiff.t;
  4. mutable bh : Optimise.Algodiff.t;
  5. mutable by : Optimise.Algodiff.t;
  6. mutable h : Optimise.Algodiff.t;
  7. mutable hiddens : int;
  8. mutable act : Activation.typ;
  9. mutable init_typ : Init.typ;
  10. mutable in_shape : int array;
  11. mutable out_shape : int array;
}

Neuron type definition.

val create : - ?time_steps:int -> - ?inputs:int -> - int -> - int -> - Activation.typ -> - Init.typ -> - neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Reshape/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Reshape/index.html deleted file mode 100644 index df0146462..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Reshape/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Reshape (owl-base.Owl_neural_graph.Make.Neuron.Reshape)

Module Neuron.Reshape

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Slice/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Slice/index.html deleted file mode 100644 index b3568f4b1..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/Slice/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Slice (owl-base.Owl_neural_graph.Make.Neuron.Slice)

Module Neuron.Slice

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
  3. mutable slice : int list list;
}

Neuron type definition.

val create : int list list -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/TransposeConv1D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/TransposeConv1D/index.html deleted file mode 100644 index 463cc3454..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/TransposeConv1D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -TransposeConv1D (owl-base.Owl_neural_graph.Make.Neuron.TransposeConv1D)

Module Neuron.TransposeConv1D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/TransposeConv2D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/TransposeConv2D/index.html deleted file mode 100644 index d77de180f..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/TransposeConv2D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -TransposeConv2D (owl-base.Owl_neural_graph.Make.Neuron.TransposeConv2D)

Module Neuron.TransposeConv2D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/TransposeConv3D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/TransposeConv3D/index.html deleted file mode 100644 index ac8257b01..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/TransposeConv3D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -TransposeConv3D (owl-base.Owl_neural_graph.Make.Neuron.TransposeConv3D)

Module Neuron.TransposeConv3D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/UpSampling1D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/UpSampling1D/index.html deleted file mode 100644 index 8c8c1e79a..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/UpSampling1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling1D (owl-base.Owl_neural_graph.Make.Neuron.UpSampling1D)

Module Neuron.UpSampling1D

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/UpSampling2D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/UpSampling2D/index.html deleted file mode 100644 index 20b9a96bd..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/UpSampling2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling2D (owl-base.Owl_neural_graph.Make.Neuron.UpSampling2D)

Module Neuron.UpSampling2D

type neuron_typ = {
  1. mutable size : int array;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/UpSampling3D/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/UpSampling3D/index.html deleted file mode 100644 index ccbfca582..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/UpSampling3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling3D (owl-base.Owl_neural_graph.Make.Neuron.UpSampling3D)

Module Neuron.UpSampling3D

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/index.html b/owl-base/Owl_neural_graph/Make/argument-1-Neuron/index.html deleted file mode 100644 index 41dc6026f..000000000 --- a/owl-base/Owl_neural_graph/Make/argument-1-Neuron/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Neuron (owl-base.Owl_neural_graph.Make.Neuron)

Parameter Make.Neuron

Init neuron
module Init : sig ... end
Input neuron
module Input : sig ... end
Activation neuron
module Activation : sig ... end
Linear neuron
module Linear : sig ... end
LinearNoBias neuron
module LinearNoBias : sig ... end
Recurrent neuron
module Recurrent : sig ... end
LSTM neuron
module LSTM : sig ... end
GRU neuron
module GRU : sig ... end
Conv1D neuron
module Conv1D : sig ... end
Conv2D neuron
module Conv2D : sig ... end
Conv3D neuron
module Conv3D : sig ... end
DilatedConv1D neuron
module DilatedConv1D : sig ... end
DilatedConv2D neuron
module DilatedConv2D : sig ... end
DilatedConv3D neuron
module DilatedConv3D : sig ... end
TransposeConv1D neuron
module TransposeConv1D : sig ... end
TransposeConv2D neuron
module TransposeConv2D : sig ... end
TransposeConv3D neuron
module TransposeConv3D : sig ... end
FullyConnected neuron
module FullyConnected : sig ... end
MaxPool1D neuron
module MaxPool1D : sig ... end
MaxPool2D neuron
module MaxPool2D : sig ... end
AvgPool1D neuron
module AvgPool1D : sig ... end
AvgPool2D neuron
module AvgPool2D : sig ... end
GlobalMaxPool1D neuron
module GlobalMaxPool1D : sig ... end
GlobalMaxPool2D neuron
module GlobalMaxPool2D : sig ... end
GlobalAvgPool1D neuron
module GlobalAvgPool1D : sig ... end
GlobalAvgPool2D neuron
module GlobalAvgPool2D : sig ... end
UpSampling1D neuron
module UpSampling1D : sig ... end
UpSampling2D neuron
module UpSampling2D : sig ... end
UpSampling3D neuron
module UpSampling3D : sig ... end
Padding1D neuron
module Padding1D : sig ... end
Padding2D neuron
module Padding2D : sig ... end
Padding3D neuron
module Padding3D : sig ... end
Lambda neuron
module Lambda : sig ... end
LambdaArray neuron
module LambdaArray : sig ... end
Dropout neuron
module Dropout : sig ... end
Reshape neuron
module Reshape : sig ... end
Flatten neuron
module Flatten : sig ... end
Slice neuron
module Slice : sig ... end
Add neuron
module Add : sig ... end
Mul neuron
module Mul : sig ... end
Dot neuron
module Dot : sig ... end
Max neuron
module Max : sig ... end
Average neuron
module Average : sig ... end
Concatenate neuron
module Concatenate : sig ... end
Normalisation neuron
module Normalisation : sig ... end
GaussianNoise neuron
module GaussianNoise : sig ... end
GaussianDropout neuron
module GaussianDropout : sig ... end
AlphaDropout neuron
module AlphaDropout : sig ... end
Embedding neuron
module Embedding : sig ... end
Masking neuron
module Masking : sig ... end
Core functions
type neuron =
  1. | Input of Input.neuron_typ
  2. | Linear of Linear.neuron_typ
  3. | LinearNoBias of LinearNoBias.neuron_typ
  4. | Embedding of Embedding.neuron_typ
  5. | LSTM of LSTM.neuron_typ
  6. | GRU of GRU.neuron_typ
  7. | Recurrent of Recurrent.neuron_typ
  8. | Conv1D of Conv1D.neuron_typ
  9. | Conv2D of Conv2D.neuron_typ
  10. | Conv3D of Conv3D.neuron_typ
  11. | DilatedConv1D of DilatedConv1D.neuron_typ
  12. | DilatedConv2D of DilatedConv2D.neuron_typ
  13. | DilatedConv3D of DilatedConv3D.neuron_typ
  14. | TransposeConv1D of TransposeConv1D.neuron_typ
  15. | TransposeConv2D of TransposeConv2D.neuron_typ
  16. | TransposeConv3D of TransposeConv3D.neuron_typ
  17. | FullyConnected of FullyConnected.neuron_typ
  18. | MaxPool1D of MaxPool1D.neuron_typ
  19. | MaxPool2D of MaxPool2D.neuron_typ
  20. | AvgPool1D of AvgPool1D.neuron_typ
  21. | AvgPool2D of AvgPool2D.neuron_typ
  22. | GlobalMaxPool1D of GlobalMaxPool1D.neuron_typ
  23. | GlobalMaxPool2D of GlobalMaxPool2D.neuron_typ
  24. | GlobalAvgPool1D of GlobalAvgPool1D.neuron_typ
  25. | GlobalAvgPool2D of GlobalAvgPool2D.neuron_typ
  26. | UpSampling2D of UpSampling2D.neuron_typ
  27. | Padding2D of Padding2D.neuron_typ
  28. | Dropout of Dropout.neuron_typ
  29. | Reshape of Reshape.neuron_typ
  30. | Flatten of Flatten.neuron_typ
  31. | Slice of Slice.neuron_typ
  32. | Lambda of Lambda.neuron_typ
  33. | LambdaArray of LambdaArray.neuron_typ
  34. | Activation of Activation.neuron_typ
  35. | GaussianNoise of GaussianNoise.neuron_typ
  36. | GaussianDropout of GaussianDropout.neuron_typ
  37. | AlphaDropout of AlphaDropout.neuron_typ
  38. | Normalisation of Normalisation.neuron_typ
  39. | Add of Add.neuron_typ
  40. | Mul of Mul.neuron_typ
  41. | Dot of Dot.neuron_typ
  42. | Max of Max.neuron_typ
  43. | Average of Average.neuron_typ
  44. | Concatenate of Concatenate.neuron_typ

Types of neuron.

val get_in_out_shape : neuron -> int array * int array

Get both input and output shapes of a neuron.

val get_in_shape : neuron -> int array

Get the input shape of a neuron.

val get_out_shape : neuron -> int array

Get the output shape of a neuron.

val connect : int array array -> neuron -> unit

Connect this neuron to others in a neural network.

val init : neuron -> unit

Initialise the neuron and its parameters.

val reset : neuron -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron -> Optimise.Algodiff.t array

Assemble all the trainable parameters in an array, used by Optimise module.

val mkpri : neuron -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron -> Optimise.Algodiff.t array -> unit

Update trainable parameters in a neuron, used by Optimise module.

val load_weights : neuron -> Optimise.Algodiff.t array -> unit

Load both trainable and non-trainable parameters into the neuron.

val save_weights : neuron -> Optimise.Algodiff.t array

Assemble both trainable and non-trainable parameters of the neuron.

val copy : neuron -> neuron

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : neuron -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph/Make/index.html b/owl-base/Owl_neural_graph/Make/index.html deleted file mode 100644 index 2004fa51c..000000000 --- a/owl-base/Owl_neural_graph/Make/index.html +++ /dev/null @@ -1,243 +0,0 @@ - -Make (owl-base.Owl_neural_graph.Make)

Module Owl_neural_graph.Make

Parameters

Signature

module Neuron = Neuron
type node = {
  1. mutable name : string;
  2. mutable prev : node array;
  3. mutable next : node array;
  4. mutable neuron : Neuron.neuron;
  5. mutable output : Neuron.Optimise.Algodiff.t option;
  6. mutable network : network;
  7. mutable train : bool;
}
and network = {
  1. mutable nnid : string;
  2. mutable size : int;
  3. mutable roots : node array;
  4. mutable outputs : node array;
  5. mutable topo : node array;
}
val make_network : ?nnid:string -> int -> node array -> node array -> network
val make_node : - ?name:string -> - ?train:bool -> - node array -> - node array -> - Neuron.neuron -> - Neuron.Optimise.Algodiff.t option -> - network -> - node
val get_roots : network -> node array
val get_outputs : network -> node array
val get_node : network -> string -> node
val get_network : ?name:string -> node -> network
val outputs : ?name:string -> node array -> network
val get_network_name : network -> string
val set_network_name : network -> string -> unit
val input_shape : network -> int array
val input_shapes : network -> int array array
val collect_output : node array -> Neuron.Optimise.Algodiff.t array
val connect_pair : node -> node -> unit
val connect_to_parents : node array -> node -> unit
val add_node : - ?act_typ:Neuron.Activation.typ -> - network -> - node array -> - node -> - node
val init : network -> unit
val reset : network -> unit
val mktag : int -> network -> unit
val mkpar : network -> Neuron.Optimise.Algodiff.t array array
val mkpri : network -> Neuron.Optimise.Algodiff.t array array
val mkadj : network -> Neuron.Optimise.Algodiff.t array array
val update : network -> Neuron.Optimise.Algodiff.t array array -> unit
val run_inputs : - Neuron.Optimise.Algodiff.t array -> - network -> - Neuron.Optimise.Algodiff.t array
val forward_inputs : - network -> - Neuron.Optimise.Algodiff.t array -> - Neuron.Optimise.Algodiff.t array * Neuron.Optimise.Algodiff.t array array
val backward : - network -> - Neuron.Optimise.Algodiff.t -> - Neuron.Optimise.Algodiff.t array array - * Neuron.Optimise.Algodiff.t array array
val copy : network -> network
val _remove_training_nodes : network -> unit
val model_inputs : - network -> - Neuron.Optimise.Algodiff.A.arr array -> - Neuron.Optimise.Algodiff.A.arr array
val input : ?name:string -> int array -> node
val inputs : ?names:string array -> int array array -> node array
val activation : ?name:string -> Neuron.Activation.typ -> node -> node
val linear : - ?name:string -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int -> - node -> - node
val linear_nobias : - ?name:string -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int -> - node -> - node
val embedding : - ?name:string -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int -> - int -> - node -> - node
val recurrent : - ?name:string -> - ?init_typ:Neuron.Init.typ -> - act_typ:Neuron.Activation.typ -> - int -> - int -> - node -> - node
val lstm : ?name:string -> ?init_typ:Neuron.Init.typ -> int -> node -> node
val gru : ?name:string -> ?init_typ:Neuron.Init.typ -> int -> node -> node
val conv1d : - ?name:string -> - ?padding:Owl_types.padding -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node
val conv2d : - ?name:string -> - ?padding:Owl_types.padding -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node
val conv3d : - ?name:string -> - ?padding:Owl_types.padding -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node
val dilated_conv1d : - ?name:string -> - ?padding:Owl_types.padding -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - int array -> - node -> - node
val dilated_conv2d : - ?name:string -> - ?padding:Owl_types.padding -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - int array -> - node -> - node
val dilated_conv3d : - ?name:string -> - ?padding:Owl_types.padding -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - int array -> - node -> - node
val transpose_conv1d : - ?name:string -> - ?padding:Owl_types.padding -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node
val transpose_conv2d : - ?name:string -> - ?padding:Owl_types.padding -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node
val transpose_conv3d : - ?name:string -> - ?padding:Owl_types.padding -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node
val fully_connected : - ?name:string -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int -> - node -> - node
val max_pool1d : - ?name:string -> - ?padding:Owl_types.padding -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node
val max_pool2d : - ?name:string -> - ?padding:Owl_types.padding -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node
val avg_pool1d : - ?name:string -> - ?padding:Owl_types.padding -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node
val avg_pool2d : - ?name:string -> - ?padding:Owl_types.padding -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node
val global_max_pool1d : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - node -> - node
val global_max_pool2d : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - node -> - node
val global_avg_pool1d : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - node -> - node
val global_avg_pool2d : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - node -> - node
val upsampling2d : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - int array -> - node -> - node
val padding2d : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - int array array -> - node -> - node
val dropout : ?name:string -> float -> node -> node
val gaussian_noise : ?name:string -> float -> node -> node
val gaussian_dropout : ?name:string -> float -> node -> node
val alpha_dropout : ?name:string -> float -> node -> node
val normalisation : - ?name:string -> - ?axis:int -> - ?training:bool -> - ?decay:float -> - ?mu:Neuron.Optimise.Algodiff.A.arr -> - ?var:Neuron.Optimise.Algodiff.A.arr -> - node -> - node
val reshape : ?name:string -> int array -> node -> node
val flatten : ?name:string -> node -> node
val slice : ?name:string -> int list list -> node -> node
val lambda : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - ?out_shape:int array -> - (Neuron.Optimise.Algodiff.t -> Neuron.Optimise.Algodiff.t) -> - node -> - node
val lambda_array : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - int array -> - (Neuron.Optimise.Algodiff.t array -> Neuron.Optimise.Algodiff.t) -> - node array -> - node
val add : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val mul : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val dot : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val max : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val average : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - node array -> - node
val concatenate : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - int -> - node array -> - node
val to_string : network -> string
val pp_network : Stdlib.Format.formatter -> network -> unit
val print : network -> unit
val save : ?unsafe:bool -> network -> string -> unit
val load : string -> network
val save_weights : network -> string -> unit
val load_weights : network -> string -> unit
val make_subnetwork : - ?copy:bool -> - ?make_inputs:string array -> - network -> - string array -> - network
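To illustrate how these combinators compose (a hedged sketch, not taken from the source: the input shape and layer sizes are arbitrary, and the activation constructors come from Neuron.Activation):

  let nn =
    input [| 28; 28; 1 |]
    |> flatten
    |> linear 128 ~act_typ:Neuron.Activation.Relu
    |> linear 10 ~act_typ:(Neuron.Activation.Softmax 1)
    |> get_network
  in
  print nn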
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/.dummy b/owl-base/Owl_neural_graph_sig/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Activation/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Activation/index.html deleted file mode 100644 index 6adeb915c..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Activation/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Activation (owl-base.Owl_neural_graph_sig.Sig.Neuron.Activation)

Module Neuron.Activation

type typ =
  1. | Elu
  2. | Relu
  3. | Sigmoid
  4. | HardSigmoid
  5. | Softmax of int
  6. | Softplus
  7. | Softsign
  8. | Tanh
  9. | Relu6
  10. | LeakyRelu of float
  11. | TRelu of float
  12. | Custom of (Optimise.Algodiff.t -> Optimise.Algodiff.t)
  13. | None

Types of activation functions.

type neuron_typ = {
  1. mutable activation : typ;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val run_activation : Optimise.Algodiff.t -> typ -> Optimise.Algodiff.t

Run one specific activation function.
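For example (a sketch; x is assumed to be an existing Algodiff value):

  (* Apply a rectified linear activation to x. *)
  let y = Activation.run_activation x Activation.Relu in
  ignore y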

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val activation_to_string : typ -> string

Return the name of a specific activation function.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Add/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Add/index.html deleted file mode 100644 index 4d57a0bec..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Add/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Add (owl-base.Owl_neural_graph_sig.Sig.Neuron.Add)

Module Neuron.Add

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/AlphaDropout/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/AlphaDropout/index.html deleted file mode 100644 index fe7e439c6..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/AlphaDropout/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -AlphaDropout (owl-base.Owl_neural_graph_sig.Sig.Neuron.AlphaDropout)

Module Neuron.AlphaDropout

type neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : float -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Average/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Average/index.html deleted file mode 100644 index ba90cccf3..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Average/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Average (owl-base.Owl_neural_graph_sig.Sig.Neuron.Average)

Module Neuron.Average

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/AvgPool1D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/AvgPool1D/index.html deleted file mode 100644 index 267c7dfff..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/AvgPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -AvgPool1D (owl-base.Owl_neural_graph_sig.Sig.Neuron.AvgPool1D)

Module Neuron.AvgPool1D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : Owl_types.padding -> int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/AvgPool2D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/AvgPool2D/index.html deleted file mode 100644 index 03683b616..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/AvgPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -AvgPool2D (owl-base.Owl_neural_graph_sig.Sig.Neuron.AvgPool2D)

Module Neuron.AvgPool2D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : Owl_types.padding -> int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Concatenate/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Concatenate/index.html deleted file mode 100644 index 7f10769e4..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Concatenate/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Concatenate (owl-base.Owl_neural_graph_sig.Sig.Neuron.Concatenate)

Module Neuron.Concatenate

type neuron_typ = {
  1. mutable axis : int;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : int -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Conv1D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Conv1D/index.html deleted file mode 100644 index 3eff861bc..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Conv1D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Conv1D (owl-base.Owl_neural_graph_sig.Sig.Neuron.Conv1D)

Module Neuron.Conv1D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Conv2D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Conv2D/index.html deleted file mode 100644 index 4cc5ba537..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Conv2D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Conv2D (owl-base.Owl_neural_graph_sig.Sig.Neuron.Conv2D)

Module Neuron.Conv2D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Conv3D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Conv3D/index.html deleted file mode 100644 index 99fc66ff4..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Conv3D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Conv3D (owl-base.Owl_neural_graph_sig.Sig.Neuron.Conv3D)

Module Neuron.Conv3D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/DilatedConv1D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/DilatedConv1D/index.html deleted file mode 100644 index 71d59816e..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/DilatedConv1D/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -DilatedConv1D (owl-base.Owl_neural_graph_sig.Sig.Neuron.DilatedConv1D)

Module Neuron.DilatedConv1D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}

Neuron type definition.

val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - int array -> - Init.typ -> - neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/DilatedConv2D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/DilatedConv2D/index.html deleted file mode 100644 index 4ce533f95..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/DilatedConv2D/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -DilatedConv2D (owl-base.Owl_neural_graph_sig.Sig.Neuron.DilatedConv2D)

Module Neuron.DilatedConv2D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}

Neuron type definition.

val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - int array -> - Init.typ -> - neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/DilatedConv3D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/DilatedConv3D/index.html deleted file mode 100644 index ed7fd0be4..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/DilatedConv3D/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -DilatedConv3D (owl-base.Owl_neural_graph_sig.Sig.Neuron.DilatedConv3D)

Module Neuron.DilatedConv3D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}

Neuron type definition.

val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - int array -> - Init.typ -> - neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Dot/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Dot/index.html deleted file mode 100644 index 3888ee708..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Dot/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Dot (owl-base.Owl_neural_graph_sig.Sig.Neuron.Dot)

Module Neuron.Dot

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Dropout/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Dropout/index.html deleted file mode 100644 index a9676a6de..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Dropout/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Dropout (owl-base.Owl_neural_graph_sig.Sig.Neuron.Dropout)

Module Neuron.Dropout

type neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : float -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Embedding/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Embedding/index.html deleted file mode 100644 index ba86621c7..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Embedding/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Embedding (owl-base.Owl_neural_graph_sig.Sig.Neuron.Embedding)

Module Neuron.Embedding

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_dim : int;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int -> int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
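
A minimal usage sketch, assuming the single-precision neurons under Owl.Neural.S.Graph.Neuron (the module path is an assumption). The two int arguments are interpreted here as the vocabulary size (in_dim) and the embedding dimension; verify this reading against your Owl version.

  module N = Owl.Neural.S.Graph.Neuron   (* assumed module path *)

  let () =
    let e = N.Embedding.create 10_000 128 (N.Init.Uniform (-0.05, 0.05)) in
    N.Embedding.connect [| 20 |] e;      (* e.g. token sequences of length 20 *)
    N.Embedding.init e;                  (* allocate the embedding matrix w   *)
    print_endline (N.Embedding.to_string e)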

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Flatten/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Flatten/index.html deleted file mode 100644 index f87984963..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Flatten/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Flatten (owl-base.Owl_neural_graph_sig.Sig.Neuron.Flatten)

Module Neuron.Flatten

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/FullyConnected/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/FullyConnected/index.html deleted file mode 100644 index ea345c796..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/FullyConnected/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -FullyConnected (owl-base.Owl_neural_graph_sig.Sig.Neuron.FullyConnected)

Module Neuron.FullyConnected

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
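
A minimal usage sketch of the create/connect/init sequence for this neuron; the module path Owl.Neural.S.Graph.Neuron is an assumption.

  module N = Owl.Neural.S.Graph.Neuron   (* assumed module path *)

  let () =
    let fc = N.FullyConnected.create 10 (N.Init.Gaussian (0., 0.01)) in  (* 10 output units *)
    N.FullyConnected.connect [| 128 |] fc;   (* 128 input features from the previous layer *)
    N.FullyConnected.init fc;                (* allocate w and b according to init_typ     *)
    print_endline (N.FullyConnected.to_string fc)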

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GRU/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GRU/index.html deleted file mode 100644 index 559f34978..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GRU/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GRU (owl-base.Owl_neural_graph_sig.Sig.Neuron.GRU)

Module Neuron.GRU

type neuron_typ = {
  1. mutable wxz : Optimise.Algodiff.t;
  2. mutable whz : Optimise.Algodiff.t;
  3. mutable wxr : Optimise.Algodiff.t;
  4. mutable whr : Optimise.Algodiff.t;
  5. mutable wxh : Optimise.Algodiff.t;
  6. mutable whh : Optimise.Algodiff.t;
  7. mutable bz : Optimise.Algodiff.t;
  8. mutable br : Optimise.Algodiff.t;
  9. mutable bh : Optimise.Algodiff.t;
  10. mutable h : Optimise.Algodiff.t;
  11. mutable init_typ : Init.typ;
  12. mutable in_shape : int array;
  13. mutable out_shape : int array;
}

Neuron type definition.

val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GaussianDropout/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GaussianDropout/index.html deleted file mode 100644 index 8c11b9faa..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GaussianDropout/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GaussianDropout (owl-base.Owl_neural_graph_sig.Sig.Neuron.GaussianDropout)

Module Neuron.GaussianDropout

type neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : float -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GaussianNoise/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GaussianNoise/index.html deleted file mode 100644 index d39a3c3f2..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GaussianNoise/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GaussianNoise (owl-base.Owl_neural_graph_sig.Sig.Neuron.GaussianNoise)

Module Neuron.GaussianNoise

type neuron_typ = {
  1. mutable sigma : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : float -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GlobalAvgPool1D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GlobalAvgPool1D/index.html deleted file mode 100644 index 8857c9e0c..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GlobalAvgPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalAvgPool1D (owl-base.Owl_neural_graph_sig.Sig.Neuron.GlobalAvgPool1D)

Module Neuron.GlobalAvgPool1D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GlobalAvgPool2D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GlobalAvgPool2D/index.html deleted file mode 100644 index fb058f419..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GlobalAvgPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalAvgPool2D (owl-base.Owl_neural_graph_sig.Sig.Neuron.GlobalAvgPool2D)

Module Neuron.GlobalAvgPool2D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GlobalMaxPool1D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GlobalMaxPool1D/index.html deleted file mode 100644 index b52837edb..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GlobalMaxPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalMaxPool1D (owl-base.Owl_neural_graph_sig.Sig.Neuron.GlobalMaxPool1D)

Module Neuron.GlobalMaxPool1D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GlobalMaxPool2D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GlobalMaxPool2D/index.html deleted file mode 100644 index b3a5041d3..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/GlobalMaxPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalMaxPool2D (owl-base.Owl_neural_graph_sig.Sig.Neuron.GlobalMaxPool2D)

Module Neuron.GlobalMaxPool2D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Init/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Init/index.html deleted file mode 100644 index 324c1c13a..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Init/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Init (owl-base.Owl_neural_graph_sig.Sig.Neuron.Init)

Module Neuron.Init

type typ =
  1. | Uniform of float * float
  2. | Gaussian of float * float
  3. | Standard
  4. | Tanh
  5. | GlorotNormal
  6. | GlorotUniform
  7. | LecunNormal
  8. | HeNormal
  9. | Custom of (int array -> Optimise.Algodiff.t)

Initialisation types.

val calc_fans : int array -> float * float

Calculate fan-in and fan-out of weights.

val run : typ -> int array -> Optimise.Algodiff.t -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
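
A minimal sketch of computing fan values and building an initialiser of the kind the create functions above accept; the module path Owl.Neural.S.Graph.Neuron is an assumption.

  module N = Owl.Neural.S.Graph.Neuron   (* assumed module path *)

  let () =
    let fan_in, fan_out = N.Init.calc_fans [| 784; 256 |] in
    Printf.printf "fan_in = %g, fan_out = %g\n" fan_in fan_out;
    (* an initialiser value, here scaled by the fan-in as in He initialisation *)
    let init = N.Init.Gaussian (0., sqrt (2. /. fan_in)) in
    print_endline (N.Init.to_string init)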

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Input/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Input/index.html deleted file mode 100644 index 589aacd34..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Input/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Input (owl-base.Owl_neural_graph_sig.Sig.Neuron.Input)

Module Neuron.Input

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : int array -> neuron_typ

Create the neuron.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/LSTM/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/LSTM/index.html deleted file mode 100644 index 34622590c..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/LSTM/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -LSTM (owl-base.Owl_neural_graph_sig.Sig.Neuron.LSTM)

Module Neuron.LSTM

type neuron_typ = {
  1. mutable wxi : Optimise.Algodiff.t;
  2. mutable whi : Optimise.Algodiff.t;
  3. mutable wxc : Optimise.Algodiff.t;
  4. mutable whc : Optimise.Algodiff.t;
  5. mutable wxf : Optimise.Algodiff.t;
  6. mutable whf : Optimise.Algodiff.t;
  7. mutable wxo : Optimise.Algodiff.t;
  8. mutable who : Optimise.Algodiff.t;
  9. mutable bi : Optimise.Algodiff.t;
  10. mutable bc : Optimise.Algodiff.t;
  11. mutable bf : Optimise.Algodiff.t;
  12. mutable bo : Optimise.Algodiff.t;
  13. mutable c : Optimise.Algodiff.t;
  14. mutable h : Optimise.Algodiff.t;
  15. mutable init_typ : Init.typ;
  16. mutable in_shape : int array;
  17. mutable out_shape : int array;
}

Neuron type definition.

val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Lambda/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Lambda/index.html deleted file mode 100644 index 5bc3db86d..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Lambda/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Lambda (owl-base.Owl_neural_graph_sig.Sig.Neuron.Lambda)

Module Neuron.Lambda

type neuron_typ = {
  1. mutable lambda : Optimise.Algodiff.t -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : ?out_shape:int array -> (Optimise.Algodiff.t -> Optimise.Algodiff.t) -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
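
A minimal sketch wrapping an arbitrary differentiable function as a layer; the module paths Owl.Neural.S.Graph.Neuron and Owl.Algodiff.S are assumptions, and the concrete Algodiff instance is assumed to match the Optimise.Algodiff.t used above.

  module N = Owl.Neural.S.Graph.Neuron   (* assumed module path *)
  module AD = Owl.Algodiff.S             (* assumed Algodiff instance *)

  let () =
    let swish = N.Lambda.create (fun x -> AD.Maths.(x * sigmoid x)) in  (* f(x) = x * sigmoid x *)
    N.Lambda.connect [| 32 |] swish;
    print_endline (N.Lambda.to_string swish)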

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/LambdaArray/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/LambdaArray/index.html deleted file mode 100644 index 68d64311d..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/LambdaArray/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -LambdaArray (owl-base.Owl_neural_graph_sig.Sig.Neuron.LambdaArray)

Module Neuron.LambdaArray

type neuron_typ = {
  1. mutable lambda : Optimise.Algodiff.t array -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : int array -> (Optimise.Algodiff.t array -> Optimise.Algodiff.t) -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Linear/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Linear/index.html deleted file mode 100644 index 95154a514..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Linear/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Linear (owl-base.Owl_neural_graph_sig.Sig.Neuron.Linear)

Module Neuron.Linear

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/LinearNoBias/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/LinearNoBias/index.html deleted file mode 100644 index 024dbff88..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/LinearNoBias/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -LinearNoBias (owl-base.Owl_neural_graph_sig.Sig.Neuron.LinearNoBias)

Module Neuron.LinearNoBias

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_shape : int array;
  4. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Masking/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Masking/index.html deleted file mode 100644 index b29bfd967..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Masking/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Masking (owl-base.Owl_neural_graph_sig.Sig.Neuron.Masking)

Module Neuron.Masking

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Max/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Max/index.html deleted file mode 100644 index d5a9c77e9..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Max/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Max (owl-base.Owl_neural_graph_sig.Sig.Neuron.Max)

Module Neuron.Max

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/MaxPool1D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/MaxPool1D/index.html deleted file mode 100644 index fa783611f..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/MaxPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -MaxPool1D (owl-base.Owl_neural_graph_sig.Sig.Neuron.MaxPool1D)

Module Neuron.MaxPool1D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : Owl_types.padding -> int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
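
A minimal usage sketch of this pooling neuron; the module path Owl.Neural.S.Graph.Neuron is an assumption.

  module N = Owl.Neural.S.Graph.Neuron   (* assumed module path *)

  let () =
    let p = N.MaxPool1D.create Owl_types.VALID [| 2 |] [| 2 |] in  (* kernel 2, stride 2 *)
    N.MaxPool1D.connect [| 100; 16 |] p;   (* 100 time steps, 16 channels *)
    print_endline (N.MaxPool1D.to_string p)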

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/MaxPool2D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/MaxPool2D/index.html deleted file mode 100644 index f136b6ec1..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/MaxPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -MaxPool2D (owl-base.Owl_neural_graph_sig.Sig.Neuron.MaxPool2D)

Module Neuron.MaxPool2D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : Owl_types.padding -> int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Mul/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Mul/index.html deleted file mode 100644 index 47de4003f..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Mul/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mul (owl-base.Owl_neural_graph_sig.Sig.Neuron.Mul)

Module Neuron.Mul

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Normalisation/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Normalisation/index.html deleted file mode 100644 index 474d3617a..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Normalisation/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Normalisation (owl-base.Owl_neural_graph_sig.Sig.Neuron.Normalisation)

Module Neuron.Normalisation

type neuron_typ = {
  1. mutable axis : int;
  2. mutable beta : Optimise.Algodiff.t;
  3. mutable gamma : Optimise.Algodiff.t;
  4. mutable mu : Optimise.Algodiff.t;
  5. mutable var : Optimise.Algodiff.t;
  6. mutable decay : Optimise.Algodiff.t;
  7. mutable training : bool;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}

Neuron type definition.

val create : ?training:bool -> ?decay:float -> ?mu:Optimise.Algodiff.A.arr -> ?var:Optimise.Algodiff.A.arr -> int -> neuron_typ

Create the neuron. Note that axis 0 is the batch axis.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the trainable parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update trainable parameters of the neuron, used by Optimise module.

val load_weights : neuron_typ -> Optimise.Algodiff.t array -> unit

Load both trainable and non-trainable parameters into the neuron.

val save_weights : neuron_typ -> Optimise.Algodiff.t array

Assemble both trainable and non-trainable parameters of the neuron.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.
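
A minimal usage sketch of this batch-normalisation neuron, normalising along axis 1 since axis 0 is the batch axis as noted above; the module path Owl.Neural.S.Graph.Neuron is an assumption.

  module N = Owl.Neural.S.Graph.Neuron   (* assumed module path *)

  let () =
    let bn = N.Normalisation.create ~training:true ~decay:0.99 1 in
    N.Normalisation.connect [| 64 |] bn;
    N.Normalisation.init bn;             (* allocate beta, gamma, mu, var *)
    print_endline (N.Normalisation.to_string bn)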

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/A/Linalg/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/A/Linalg/index.html deleted file mode 100644 index b30e710ed..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/A/Mat/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/A/Mat/index.html deleted file mode 100644 index 5a88de4ca..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/A/Scalar/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/A/Scalar/index.html deleted file mode 100644 index eefbeb73b..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/A/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/A/index.html deleted file mode 100644 index f4a273520..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Algodiff.A)

Module Algodiff.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Arr/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Arr/index.html deleted file mode 100644 index 367666024..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/index.html deleted file mode 100644 index e0995c9f7..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Algodiff.Builder)

Module Algodiff.Builder

Ops Builder
module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t

Build single-input single-output operations.

module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t

Build single-input pair-output operations.

module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t

Build single-input triple-output operations.

module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array

Build single-input array-output operations.

module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t

Build pair-input single-output operations.

module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t

Build array-input single-output operations.
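
As an illustration of this builder pattern, here is a minimal sketch of a custom single-input single-output operation registered through build_siso. It deliberately duplicates sine, whose forward rule is given by ff_f/ff_arr, whose tangent rule is df (at * cos ap), and whose adjoint rule is dr (!ca * cos a). The concrete instance Owl.Algodiff.S is an assumed stand-in for the Algodiff module documented here.

  module AD = Owl.Algodiff.S   (* assumed concrete Algodiff instance *)

  let my_sin =
    AD.Builder.build_siso
      (module struct
        let label = "my_sin"
        let ff_f a = AD.pack_elt (AD.A.Scalar.sin a)   (* scalar forward pass  *)
        let ff_arr a = AD.pack_arr (AD.A.sin a)        (* ndarray forward pass *)
        let df _cp ap at = AD.Maths.(at * cos ap)      (* forward-mode tangent *)
        let dr a _cp ca = AD.Maths.(!ca * cos a)       (* reverse-mode adjoint *)
      end : AD.Builder.Siso)

  let () =
    let d = AD.diff my_sin (AD.pack_flt 1.0) in
    Printf.printf "d/dx sin x at 1.0 = %g\n" (AD.unpack_flt d)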

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html deleted file mode 100644 index 5e9d79629..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html deleted file mode 100644 index 49ed44a36..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html deleted file mode 100644 index 71b497595..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html deleted file mode 100644 index f77df5444..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html deleted file mode 100644 index cca7cf480..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html deleted file mode 100644 index 061fa1723..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Linalg/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Linalg/index.html deleted file mode 100644 index 731ff1b0e..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val logdet : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val chol : ?upper:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val qr : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val lq : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val svd : ?thin:bool -> t -> t * t * t

Refer to :doc:`owl_dense_ndarray_generic`

val sylvester : t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val lyapunov : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val care : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dare : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Mat/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Mat/index.html deleted file mode 100644 index e6332fbfb..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Maths/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Maths/index.html deleted file mode 100644 index dd4241a73..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (-) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (**) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val add : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sub : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mul : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val div : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val kron : t -> t -> t

Refer to :doc:`owl_dense_matrix_generic`

val dot : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pow : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val min2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cross_entropy : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val neg : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val abs : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val signum : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val floor : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val ceil : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val round : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqr : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqrt : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log2 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log10 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val exp : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum_reduce : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mean : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val swap : int -> int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l1norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm_sqr' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sigmoid : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val relu : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dawsn : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softplus : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softsign : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softmax : ?axis:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val reshape : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val flatten : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_item : t -> int -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_row : t -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val concat : axis:int -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val split : axis:int -> int array -> t -> t array

Refer to :doc:`owl_dense_ndarray_generic`

val of_arrays : t array array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val to_arrays : t -> t array array

Refer to :doc:`owl_dense_ndarray_generic`

val concatenate : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val stack : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_slice : int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_slice : int list list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_fancy : Owl_types.index list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_fancy : Owl_types.index list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diag : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diagm : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val trace : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val triu : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tril : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/NN/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/NN/index.html deleted file mode 100644 index 94eef9a29..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv1d : - ?padding:Owl_types.padding -> - t -> - t -> - int array -> - int array -> - t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv2d : - ?padding:Owl_types.padding -> - t -> - t -> - int array -> - int array -> - t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv3d : - ?padding:Owl_types.padding -> - t -> - t -> - int array -> - int array -> - t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val upsampling2d : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pad : ?v:A.elt -> int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/index.html deleted file mode 100644 index f01178fe9..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Algodiff/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Algodiff (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Algodiff)

Module Optimise.Algodiff

include Owl_algodiff_core_sig.Sig
Type definition
include Owl_algodiff_types_sig.Sig with type elt := A.elt and type arr := A.arr
type t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
Core functions
val tag : unit -> int

TODO

val primal : t -> t

TODO

val primal' : t -> t

TODO

val zero : t -> t

TODO

val reset_zero : t -> t

TODO

val tangent : t -> t

TODO

val adjref : t -> t Stdlib.ref

TODO

val adjval : t -> t

TODO

val shape : t -> int array

TODO

val is_float : t -> bool

TODO

val is_arr : t -> bool

TODO

val row_num : t -> int

number of rows

val col_num : t -> int

number of columns

val numel : t -> int

number of elements

val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t

other functions, without tracking gradient

val clip_by_l2norm : A.elt -> t -> t

other functions, without tracking gradient

val copy_primal' : t -> t

TODO

val tile : t -> int array -> t

TODO

val repeat : t -> int array -> t

TODO

val pack_elt : A.elt -> t

convert from elt type to t type.

val unpack_elt : t -> A.elt

convert from t type to elt type.

val pack_flt : float -> t

convert from float type to t type.

val _f : float -> t

A shortcut function for F A.(float_to_elt x).

val unpack_flt : t -> float

convert from t type to float type.

val pack_arr : A.arr -> t

convert from arr type to t type.

val unpack_arr : t -> A.arr

convert from t type to arr type.
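
For example, a minimal sketch of moving values across the AD boundary (assuming the functor has been instantiated so that A is the underlying ndarray module; A.sequential is assumed to be provided by it):

  let x = pack_flt 2.5 in                       (* float -> t *)
  let y = Maths.(x * x) in
  let _ = unpack_flt y in                       (* t -> float, here 6.25 *)
  let a = pack_arr (A.sequential [|2; 2|]) in   (* A.arr -> t *)
  unpack_arr Maths.(a + a)                      (* t -> A.arr *)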

val deep_info : t -> string

TODO

val type_info : t -> string

TODO

val error_binop : string -> t -> t -> 'a

TODO

val error_uniop : string -> t -> 'a

TODO

val make_forward : t -> t -> int -> t

TODO

val make_reverse : t -> int -> t

TODO

val reverse_prop : t -> t -> unit

TODO

val diff : (t -> t) -> t -> t

diff f x returns the exact derivative of a function f : scalar -> scalar at point x. Simply calling diff f will return its derivative function g of the same type, i.e. g : scalar -> scalar.

Calling this function repeatedly gives you higher-order derivatives of f, i.e. f |> diff |> diff |> diff |> ...
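
A minimal usage sketch, assuming the Algodiff functor has been instantiated over some ndarray module:

  let f x = Maths.(sin x * cos x) in
  let f' = diff f in                            (* first derivative, scalar -> scalar *)
  let f'' = f |> diff |> diff in                (* second derivative *)
  Printf.printf "%g %g\n"
    (unpack_flt (f' (pack_flt 0.5)))
    (unpack_flt (f'' (pack_flt 0.5)))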

val diff' : (t -> t) -> t -> t * t

similar to diff, but return (f x, diff f x).

val grad : (t -> t) -> t -> t

gradient of f : (vector -> scalar) at x, reverse ad.

val grad' : (t -> t) -> t -> t * t

similar to grad, but return (f x, grad f x).
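
For example, a sketch of the gradient of a vector-to-scalar function (A.uniform is assumed to be provided by the underlying ndarray module):

  let f x = Maths.(l2norm_sqr' x) in            (* f : vector -> scalar *)
  let x = pack_arr (A.uniform [|1; 3|]) in      (* row vector *)
  let g = grad f x in                           (* same shape as x, here 2 * x *)
  let y, g' = grad' f x in                      (* (f x, grad f x) *)
  ignore (g, y, g')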

val jacobian : (t -> t) -> t -> t

jacobian of f : (vector -> vector) at x, both x and y are row vectors.

val jacobian' : (t -> t) -> t -> t * t

similar to jacobian, but return (f x, jacobian f x)

val jacobianv : (t -> t) -> t -> t -> t

jacobian vector product of f : (vector -> vector) at x along v, forward ad. Namely, it calculates (jacobian f x) v

val jacobianv' : (t -> t) -> t -> t -> t * t

similar to jacobianv, but return (f x, jacobianv f x v)

val jacobianTv : (t -> t) -> t -> t -> t

transposed jacobian vector product of f : (vector -> vector) at x along v, backward ad. Namely, it calculates (transpose (jacobian f x)) v.

val jacobianTv' : (t -> t) -> t -> t -> t * t

similar to jacobianTv, but return (f x, jacobianTv f x v)
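
A sketch contrasting the forward-mode and reverse-mode products for an elementwise vector-to-vector function (A.uniform assumed available):

  let f x = Maths.(sqr x + sin x) in            (* f : vector -> vector *)
  let x = pack_arr (A.uniform [|1; 4|]) in
  let v = pack_arr (A.uniform [|1; 4|]) in
  let jv = jacobianv f x v in                   (* forward mode: (jacobian f x) v *)
  let jtv = jacobianTv f x v in                 (* reverse mode: (transpose (jacobian f x)) v *)
  ignore (jv, jtv)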

val hessian : (t -> t) -> t -> t

hessian of f : (scalar -> scalar) at x.

val hessian' : (t -> t) -> t -> t * t

similar to hessian, but return (f x, hessian f x)

val hessianv : (t -> t) -> t -> t -> t

hessian vector product of f : (scalar -> scalar) at x along v. Namely, it calculates (hessian f x) v.

val hessianv' : (t -> t) -> t -> t -> t * t

similar to hessianv, but return (f x, hessianv f x v).
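
For instance, a sketch for f x = sum of squares, whose Hessian is twice the identity (A.uniform and A.ones assumed available):

  let f x = Maths.(sum' (sqr x)) in
  let x = pack_arr (A.uniform [|1; 3|]) in
  let h = hessian f x in                                 (* 3 x 3 matrix, 2 * I *)
  let hv = hessianv f x (pack_arr (A.ones [|1; 3|])) in  (* Hessian-vector product *)
  ignore (h, hv)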

val laplacian : (t -> t) -> t -> t

laplacian of f : (scalar -> scalar) at x.

val laplacian' : (t -> t) -> t -> t * t

similar to laplacian, but return (f x, laplacian f x).

val gradhessian : (t -> t) -> t -> t * t

return (grad f x, hessian f x), f : (scalar -> scalar)

val gradhessian' : (t -> t) -> t -> t * t * t

return (f x, grad f x, hessian f x)

val gradhessianv : (t -> t) -> t -> t -> t * t

return (grad f x v, hessian f x v)

val gradhessianv' : (t -> t) -> t -> t -> t * t * t

return (f x, grad f x v, hessian f x v)

include Owl_algodiff_ops_sig.Sig - with type t := t - and type elt := A.elt - and type arr := A.arr - and type op := op
module Builder : - Owl_algodiff_ops_builder_sig.Sig - with type t := t - and type elt := A.elt - and type arr := A.arr - and type op := op
Supported Maths functions
module Maths : sig ... end
Supported Linalg functions
module Linalg : sig ... end
Supported Neural Network functions
module NN : sig ... end
Supported Mat functions
module Mat : sig ... end
Supported Arr functions
module Arr : sig ... end
Helper functions
include Owl_algodiff_graph_convert_sig.Sig with type t := t
val to_trace : t list -> string

to_trace [t0; t1; ...] returns the trace of the computation graph in a human-readable format, suitable for printing on the terminal.

val to_dot : t list -> string

to_dot [t0; t1; ...] returns the trace of the computation graph in the dot file format, which you can feed to other tools for further visualisation, such as Graphviz.
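
A sketch of dumping the graph of a small reverse-mode computation to a dot file, using plain Stdlib channels for the file output (A.uniform assumed available):

  let x = make_reverse (pack_arr (A.uniform [|1; 3|])) (tag ()) in
  let y = Maths.(sum' (sqr x)) in
  reverse_prop (pack_flt 1.) y;
  let oc = open_out "cgraph.dot" in
  output_string oc (to_dot [ y ]);
  close_out oc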

val pp_num : Stdlib.Format.formatter -> t -> unit

pp_num t pretty prints the abstract number used in Algodiff.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Batch/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Batch/index.html deleted file mode 100644 index 39967c4c9..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Batch/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Batch (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Batch)

Module Optimise.Batch

Batch module

type typ =
  1. | Full
  2. | Mini of int
  3. | Sample of int
  4. | Stochastic

Types of batches.

val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t

Execute the computations defined in module typ.

val batches : typ -> Algodiff.t -> int

Return the total number of batches given a batch typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Checkpoint/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Checkpoint/index.html deleted file mode 100644 index 8270d39f3..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Checkpoint/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Checkpoint (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Checkpoint)

Module Optimise.Checkpoint

Checkpoint module

type state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}

Type definition of checkpoint

type typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of state -> unit
  4. | None

Batch type.

val init_state : int -> float -> state

init_state batches_per_epoch epochs initialises a state by specifying the number of batches per epoch and the number of epochs in total.
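
For example, a minimal sketch with hypothetical numbers:

  let state = init_state 600 10.   (* 600 batches per epoch, 10 epochs in total *)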

val default_checkpoint_fun : (string -> 'a) -> 'a

This function is used for saving intermediate files during optimisation.

val print_state_info : state -> unit

Print out the detailed information of the current state.

val print_summary : state -> unit

Print out the summary of current state.

val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Clipping/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Clipping/index.html deleted file mode 100644 index e827dcbab..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Clipping/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Clipping (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Clipping)

Module Optimise.Clipping

Clipping module

type typ =
  1. | L2norm of float
  2. | Value of float * float
  3. | None

Types of clipping functions.

val run : typ -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Gradient/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Gradient/index.html deleted file mode 100644 index deea4abf1..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Gradient/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -Gradient (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Gradient)

Module Optimise.Gradient

Gradient module

type typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton

Types of gradient function.

val run : - typ -> - (Algodiff.t -> Algodiff.t) -> - Algodiff.t -> - Algodiff.t -> - Algodiff.t -> - Algodiff.t -> - Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Learning_Rate/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Learning_Rate/index.html deleted file mode 100644 index ae881b877..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Learning_Rate/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Learning_Rate (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Learning_Rate)

Module Optimise.Learning_Rate

Strategies for learning rate update

type typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array

Representation of learning rate update strategies. Possible values include:

  • Adam (alpha, beta1, beta2), where alpha is the step size and beta1, beta2 are the exponential decay rates of the first and second moment estimates
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array

Update the cache of gradients.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Loss/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Loss/index.html deleted file mode 100644 index f5074cf6e..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Loss/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Loss (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Loss)

Module Optimise.Loss

Loss module

type typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of Algodiff.t -> Algodiff.t -> Algodiff.t

Types of loss functions.

val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Momentum/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Momentum/index.html deleted file mode 100644 index 0571ca1e5..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Momentum/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Momentum (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Momentum)

Module Optimise.Momentum

Momentum module

type typ =
  1. | Standard of float
  2. | Nesterov of float
  3. | None

Types of momentum functions.

val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Params/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Params/index.html deleted file mode 100644 index 34a53bc48..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Params/index.html +++ /dev/null @@ -1,14 +0,0 @@ - -Params (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Params)

Module Optimise.Params

Params module

type typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}

Type definition of parameter.

val default : unit -> typ

Create module typ with default values.

val config : - ?batch:Batch.typ -> - ?gradient:Gradient.typ -> - ?loss:Loss.typ -> - ?learning_rate:Learning_Rate.typ -> - ?regularisation:Regularisation.typ -> - ?momentum:Momentum.typ -> - ?clipping:Clipping.typ -> - ?stopping:Stopping.typ -> - ?checkpoint:Checkpoint.typ -> - ?verbosity:bool -> - float -> - typ

This function creates a parameter object with many configurations.
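
A sketch of a typical configuration; the trailing float is the number of epochs, and all the constructor values are illustrative:

  let params =
    Params.config
      ~batch:(Batch.Mini 128)
      ~learning_rate:(Learning_Rate.Adam (0.001, 0.9, 0.999))
      ~loss:Loss.Cross_entropy
      ~stopping:(Stopping.Const 1e-6)
      10.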

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Regularisation/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Regularisation/index.html deleted file mode 100644 index f7042d603..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Regularisation/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Regularisation (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Regularisation)

Module Optimise.Regularisation

Regularisation module

type typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None

Types of regularisation functions.

val run : typ -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Stopping/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Stopping/index.html deleted file mode 100644 index f3ea43dd5..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Stopping/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Stopping (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Stopping)

Module Optimise.Stopping

Stopping module

type typ =
  1. | Const of float
  2. | Early of int * int
  3. | None

Types of stopping functions.

val run : typ -> float -> bool

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Utils/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Utils/index.html deleted file mode 100644 index 64c7d69e1..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise.Utils)

Module Optimise.Utils

Utils module

val sample_num : Algodiff.t -> int

Return the total number of samples in the passed-in ndarray.

val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t

draw_samples x y draws samples from both x (observations) and y (labels). The samples are drawn along axis 0, so x and y must agree along axis 0.

val get_chunk : - Algodiff.t -> - Algodiff.t -> - int -> - int -> - Algodiff.t * Algodiff.t

get_chunk x y i c gets a contiguous chunk of c samples starting at position i from x (observations) and y (labels).
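
For example, a minimal sketch where x and y are assumed to be Algodiff values that agree along axis 0:

  let xb, yb = draw_samples x y 128 in   (* 128 randomly drawn samples *)
  let xc, yc = get_chunk x y 0 128 in    (* 128 consecutive samples starting at position 0 *)
  ignore (xb, yb, xc, yc)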

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/index.html deleted file mode 100644 index 79da077e5..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Optimise/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -Optimise (owl-base.Owl_neural_graph_sig.Sig.Neuron.Optimise)

Module Neuron.Optimise

module Utils : sig ... end

Utils module

module Learning_Rate : sig ... end

Strategies for learning rate update

module Batch : sig ... end

Batch module

module Loss : sig ... end

Loss module

module Gradient : sig ... end

Gradient module

module Momentum : sig ... end

Momentum module

module Regularisation : sig ... end

Regularisation module

module Clipping : sig ... end

Clipping module

module Stopping : sig ... end

Stopping module

module Checkpoint : sig ... end

Checkpoint module

module Params : sig ... end

Params module

Core functions
val minimise_weight : - ?state:Checkpoint.state -> - Params.typ -> - (Algodiff.t -> Algodiff.t -> Algodiff.t) -> - Algodiff.t -> - Algodiff.t -> - Algodiff.t -> - Checkpoint.state * Algodiff.t

This function minimises the weight w of the passed-in function f.

* f is a function f : w -> x -> y. * w is a row vector but y can have any shape.

val minimise_network : - ?state:Checkpoint.state -> - Params.typ -> - (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> - (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> - (Algodiff.t array array -> unit) -> - (string -> unit) -> - Algodiff.t -> - Algodiff.t -> - Checkpoint.state

This function is specifically designed for minimising the weights in a neural network of graph structure. In Owl's earlier versions, the functions in the regression module were actually implemented using this function.

val minimise_fun : - ?state:Checkpoint.state -> - Params.typ -> - (Algodiff.t -> Algodiff.t) -> - Algodiff.t -> - Checkpoint.state * Algodiff.t

This function minimises f : x -> y w.r.t x.

x is an ndarray, and y is a scalar value.
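
A sketch minimising f x = l2norm_sqr' x from a random starting point (Maths, pack_arr and A are assumed to come from the associated Algodiff module):

  let f x = Algodiff.Maths.(l2norm_sqr' x) in                    (* maps an ndarray to a scalar *)
  let x0 = Algodiff.(pack_arr (A.uniform [|1; 10|])) in          (* random starting point *)
  let params = Params.config ~learning_rate:(Learning_Rate.Const 0.01) 100. in
  let _state, x_min = minimise_fun params f x0 in
  ignore x_min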

val minimise_compiled_network : - ?state:Checkpoint.state -> - Params.typ -> - (Algodiff.t -> Algodiff.t -> Algodiff.t) -> - (unit -> unit) -> - (string -> unit) -> - Algodiff.t -> - Algodiff.t -> - Checkpoint.state

TODO

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Padding1D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Padding1D/index.html deleted file mode 100644 index 6e64d8935..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Padding1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding1D (owl-base.Owl_neural_graph_sig.Sig.Neuron.Padding1D)

Module Neuron.Padding1D

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Padding2D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Padding2D/index.html deleted file mode 100644 index e2cfae798..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Padding2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding2D (owl-base.Owl_neural_graph_sig.Sig.Neuron.Padding2D)

Module Neuron.Padding2D

type neuron_typ = {
  1. mutable padding : int array array;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : int array array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Padding3D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Padding3D/index.html deleted file mode 100644 index 6321d19e2..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Padding3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding3D (owl-base.Owl_neural_graph_sig.Sig.Neuron.Padding3D)

Module Neuron.Padding3D

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Recurrent/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Recurrent/index.html deleted file mode 100644 index 8cf3cb0ab..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Recurrent/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -Recurrent (owl-base.Owl_neural_graph_sig.Sig.Neuron.Recurrent)

Module Neuron.Recurrent

type neuron_typ = {
  1. mutable whh : Optimise.Algodiff.t;
  2. mutable wxh : Optimise.Algodiff.t;
  3. mutable why : Optimise.Algodiff.t;
  4. mutable bh : Optimise.Algodiff.t;
  5. mutable by : Optimise.Algodiff.t;
  6. mutable h : Optimise.Algodiff.t;
  7. mutable hiddens : int;
  8. mutable act : Activation.typ;
  9. mutable init_typ : Init.typ;
  10. mutable in_shape : int array;
  11. mutable out_shape : int array;
}

Neuron type definition.

val create : - ?time_steps:int -> - ?inputs:int -> - int -> - int -> - Activation.typ -> - Init.typ -> - neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Reshape/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Reshape/index.html deleted file mode 100644 index f248169e9..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Reshape/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Reshape (owl-base.Owl_neural_graph_sig.Sig.Neuron.Reshape)

Module Neuron.Reshape

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Slice/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Slice/index.html deleted file mode 100644 index a49b45efa..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/Slice/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Slice (owl-base.Owl_neural_graph_sig.Sig.Neuron.Slice)

Module Neuron.Slice

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
  3. mutable slice : int list list;
}

Neuron type definition.

val create : int list list -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/TransposeConv1D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/TransposeConv1D/index.html deleted file mode 100644 index a58973534..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/TransposeConv1D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -TransposeConv1D (owl-base.Owl_neural_graph_sig.Sig.Neuron.TransposeConv1D)

Module Neuron.TransposeConv1D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/TransposeConv2D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/TransposeConv2D/index.html deleted file mode 100644 index 05e888deb..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/TransposeConv2D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -TransposeConv2D (owl-base.Owl_neural_graph_sig.Sig.Neuron.TransposeConv2D)

Module Neuron.TransposeConv2D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/TransposeConv3D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/TransposeConv3D/index.html deleted file mode 100644 index 878be20fd..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/TransposeConv3D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -TransposeConv3D (owl-base.Owl_neural_graph_sig.Sig.Neuron.TransposeConv3D)

Module Neuron.TransposeConv3D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/UpSampling1D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/UpSampling1D/index.html deleted file mode 100644 index 36d3e741d..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/UpSampling1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling1D (owl-base.Owl_neural_graph_sig.Sig.Neuron.UpSampling1D)

Module Neuron.UpSampling1D

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/UpSampling2D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/UpSampling2D/index.html deleted file mode 100644 index e9d22c979..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/UpSampling2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling2D (owl-base.Owl_neural_graph_sig.Sig.Neuron.UpSampling2D)

Module Neuron.UpSampling2D

type neuron_typ = {
  1. mutable size : int array;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/UpSampling3D/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/UpSampling3D/index.html deleted file mode 100644 index 98a101628..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/UpSampling3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling3D (owl-base.Owl_neural_graph_sig.Sig.Neuron.UpSampling3D)

Module Neuron.UpSampling3D

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/index.html deleted file mode 100644 index ec09aeacc..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/Neuron/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Neuron (owl-base.Owl_neural_graph_sig.Sig.Neuron)

Module Sig.Neuron

Init neuron
module Init : sig ... end
Input neuron
module Input : sig ... end
Activation neuron
module Activation : sig ... end
Linear neuron
module Linear : sig ... end
LinearNoBias neuron
module LinearNoBias : sig ... end
Recurrent neuron
module Recurrent : sig ... end
LSTM neuron
module LSTM : sig ... end
GRU neuron
module GRU : sig ... end
Conv1D neuron
module Conv1D : sig ... end
Conv2D neuron
module Conv2D : sig ... end
Conv3D neuron
module Conv3D : sig ... end
DilatedConv1D neuron
module DilatedConv1D : sig ... end
DilatedConv2D neuron
module DilatedConv2D : sig ... end
DilatedConv3D neuron
module DilatedConv3D : sig ... end
TransposeConv1D neuron
module TransposeConv1D : sig ... end
TransposeConv2D neuron
module TransposeConv2D : sig ... end
TransposeConv3D neuron
module TransposeConv3D : sig ... end
FullyConnected neuron
module FullyConnected : sig ... end
MaxPool1D neuron
module MaxPool1D : sig ... end
MaxPool2D neuron
module MaxPool2D : sig ... end
AvgPool1D neuron
module AvgPool1D : sig ... end
AvgPool2D neuron
module AvgPool2D : sig ... end
GlobalMaxPool1D neuron
module GlobalMaxPool1D : sig ... end
GlobalMaxPool2D neuron
module GlobalMaxPool2D : sig ... end
GlobalAvgPool1D neuron
module GlobalAvgPool1D : sig ... end
GlobalAvgPool2D neuron
module GlobalAvgPool2D : sig ... end
UpSampling1D neuron
module UpSampling1D : sig ... end
UpSampling2D neuron
module UpSampling2D : sig ... end
UpSampling3D neuron
module UpSampling3D : sig ... end
Padding1D neuron
module Padding1D : sig ... end
Padding2D neuron
module Padding2D : sig ... end
Padding3D neuron
module Padding3D : sig ... end
Lambda neuron
module Lambda : sig ... end
LambdaArray neuron
module LambdaArray : sig ... end
Dropout neuron
module Dropout : sig ... end
Reshape neuron
module Reshape : sig ... end
Flatten neuron
module Flatten : sig ... end
Slice neuron
module Slice : sig ... end
Add neuron
module Add : sig ... end
Mul neuron
module Mul : sig ... end
Dot neuron
module Dot : sig ... end
Max neuron
module Max : sig ... end
Average neuron
module Average : sig ... end
Concatenate neuron
module Concatenate : sig ... end
Normalisation neuron
module Normalisation : sig ... end
GaussianNoise neuron
module GaussianNoise : sig ... end
GaussianDropout neuron
module GaussianDropout : sig ... end
AlphaDropout neuron
module AlphaDropout : sig ... end
Embedding neuron
module Embedding : sig ... end
Masking neuron
module Masking : sig ... end
Core functions
type neuron =
  1. | Input of Input.neuron_typ
  2. | Linear of Linear.neuron_typ
  3. | LinearNoBias of LinearNoBias.neuron_typ
  4. | Embedding of Embedding.neuron_typ
  5. | LSTM of LSTM.neuron_typ
  6. | GRU of GRU.neuron_typ
  7. | Recurrent of Recurrent.neuron_typ
  8. | Conv1D of Conv1D.neuron_typ
  9. | Conv2D of Conv2D.neuron_typ
  10. | Conv3D of Conv3D.neuron_typ
  11. | DilatedConv1D of DilatedConv1D.neuron_typ
  12. | DilatedConv2D of DilatedConv2D.neuron_typ
  13. | DilatedConv3D of DilatedConv3D.neuron_typ
  14. | TransposeConv1D of TransposeConv1D.neuron_typ
  15. | TransposeConv2D of TransposeConv2D.neuron_typ
  16. | TransposeConv3D of TransposeConv3D.neuron_typ
  17. | FullyConnected of FullyConnected.neuron_typ
  18. | MaxPool1D of MaxPool1D.neuron_typ
  19. | MaxPool2D of MaxPool2D.neuron_typ
  20. | AvgPool1D of AvgPool1D.neuron_typ
  21. | AvgPool2D of AvgPool2D.neuron_typ
  22. | GlobalMaxPool1D of GlobalMaxPool1D.neuron_typ
  23. | GlobalMaxPool2D of GlobalMaxPool2D.neuron_typ
  24. | GlobalAvgPool1D of GlobalAvgPool1D.neuron_typ
  25. | GlobalAvgPool2D of GlobalAvgPool2D.neuron_typ
  26. | UpSampling2D of UpSampling2D.neuron_typ
  27. | Padding2D of Padding2D.neuron_typ
  28. | Dropout of Dropout.neuron_typ
  29. | Reshape of Reshape.neuron_typ
  30. | Flatten of Flatten.neuron_typ
  31. | Slice of Slice.neuron_typ
  32. | Lambda of Lambda.neuron_typ
  33. | LambdaArray of LambdaArray.neuron_typ
  34. | Activation of Activation.neuron_typ
  35. | GaussianNoise of GaussianNoise.neuron_typ
  36. | GaussianDropout of GaussianDropout.neuron_typ
  37. | AlphaDropout of AlphaDropout.neuron_typ
  38. | Normalisation of Normalisation.neuron_typ
  39. | Add of Add.neuron_typ
  40. | Mul of Mul.neuron_typ
  41. | Dot of Dot.neuron_typ
  42. | Max of Max.neuron_typ
  43. | Average of Average.neuron_typ
  44. | Concatenate of Concatenate.neuron_typ
    (* Types of neuron. *)
val get_in_out_shape : neuron -> int array * int array

Get both input and output shapes of a neuron.

val get_in_shape : neuron -> int array

Get the input shape of a neuron.

val get_out_shape : neuron -> int array

Get the output shape of a neuron.

val connect : int array array -> neuron -> unit

Connect this neuron to others in a neural network.

val init : neuron -> unit

Initialise the neuron and its parameters.

val reset : neuron -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron -> Optimise.Algodiff.t array

Assemble all the trainable parameters in an array, used by Optimise module.

val mkpri : neuron -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron -> Optimise.Algodiff.t array -> unit

Update trainable parameters in a neuron, used by Optimise module.

val load_weights : neuron -> Optimise.Algodiff.t array -> unit

Load both trainable and non-trainable parameters into the neuron.

val save_weights : neuron -> Optimise.Algodiff.t array

Assemble both trainable and non-trainable parameters of the neuron.

val copy : neuron -> neuron

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : neuron -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_graph_sig/module-type-Sig/index.html b/owl-base/Owl_neural_graph_sig/module-type-Sig/index.html deleted file mode 100644 index d3fb0d129..000000000 --- a/owl-base/Owl_neural_graph_sig/module-type-Sig/index.html +++ /dev/null @@ -1,243 +0,0 @@ - -Sig (owl-base.Owl_neural_graph_sig.Sig)

Module type Owl_neural_graph_sig.Sig

Type definition
type node = {
  1. mutable name : string;
  2. mutable prev : node array;
  3. mutable next : node array;
  4. mutable neuron : Neuron.neuron;
  5. mutable output : Neuron.Optimise.Algodiff.t option;
  6. mutable network : network;
  7. mutable train : bool;
}
and network = {
  1. mutable nnid : string;
  2. mutable size : int;
  3. mutable roots : node array;
  4. mutable outputs : node array;
  5. mutable topo : node array;
}

Type definition of a node and a neural network.

Manipulate networks
val make_network : ?nnid:string -> int -> node array -> node array -> network

Create an empty neural network.

val make_node : - ?name:string -> - ?train:bool -> - node array -> - node array -> - Neuron.neuron -> - Neuron.Optimise.Algodiff.t option -> - network -> - node

Create a node in a neural network.

val get_roots : network -> node array

Get the roots of the neural network.

val get_outputs : network -> node array

Get the outputs of the neural network.

val get_node : network -> string -> node

Get a node in a network with the given name.

val get_network : ?name:string -> node -> network

Get the neural network that the given node is associated with.

val outputs : ?name:string -> node array -> network

Get the neural network associated with the given output nodes.

val get_network_name : network -> string

get_network_name n returns the name of the network n.

val set_network_name : network -> string -> unit

set_network_name n s sets the name of the network n to s.

val collect_output : node array -> Neuron.Optimise.Algodiff.t array

Collect the output values of given nodes.

val connect_pair : node -> node -> unit

Connect two nodes in a neural network.

val connect_to_parents : node array -> node -> unit

Connect a node to a list of parents.

val add_node : - ?act_typ:Neuron.Activation.typ -> - network -> - node array -> - node -> - node

Add a node to the given network.

val input_shape : network -> int array

Get input shape of a network (without batch dimension), i.e. shape of input neuron.

val input_shapes : network -> int array array

Get input shapes of a network (without batch dimension), i.e. shape of input neurons.

Interface to optimisation engine
val init : network -> unit

Initialise the network.

val reset : network -> unit

Reset the network, i.e. all the parameters in the neurons.

val mktag : int -> network -> unit

Tag the neurons, used by Algodiff module.

val mkpar : network -> Neuron.Optimise.Algodiff.t array array

Collect the parameters of neurons, used by Optimise module.

val mkpri : network -> Neuron.Optimise.Algodiff.t array array

Collect the primal values of neurons, used by Optimise module.

val mkadj : network -> Neuron.Optimise.Algodiff.t array array

Collect the adjoint values of neurons, used by Optimise module.

val update : network -> Neuron.Optimise.Algodiff.t array array -> unit

Update the parameters of neurons, used by Optimise module.

Execute the computations in all the neurons in a network with the given input.

val run_inputs : - Neuron.Optimise.Algodiff.t array -> - network -> - Neuron.Optimise.Algodiff.t array

Execute the computations in all the neurons in a network with the given inputs.

Run the forward pass of a network.

val forward_inputs : - network -> - Neuron.Optimise.Algodiff.t array -> - Neuron.Optimise.Algodiff.t array * Neuron.Optimise.Algodiff.t array array

Run the forward pass of a network (multi-input/output version).

val backward : - network -> - Neuron.Optimise.Algodiff.t -> - Neuron.Optimise.Algodiff.t array array - * Neuron.Optimise.Algodiff.t array array

Run the backward pass of a network.

val copy : network -> network

Make a deep copy of the given network.

Make a deep copy of the given network, excluding the neurons marked with training = true.

val model_inputs : - network -> - Neuron.Optimise.Algodiff.A.arr array -> - Neuron.Optimise.Algodiff.A.arr array

Make a deep copy of the given network, excluding the neurons marked with training = true.

Create Neurons
val input : ?name:string -> int array -> node

input shape creates an input node for input data. Note that if your network has multiple inputs, you should use inputs instead.

Arguments: * shape: shape of input data.

val inputs : ?names:string array -> int array array -> node array

inputs shapes creates an array of input nodes for input data.

Arguments: * shapes: array of shapes of input data.

val activation : ?name:string -> Neuron.Activation.typ -> node -> node

Applies an activation function to an output.

Arguments: * activation: name of activation function to use.

val linear : - ?name:string -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int -> - node -> - node

linear ?act_typ units node adds the regular densely-connected NN node to node.

Arguments: * units: Positive integer, dimensionality of the output space. * act_typ: Activation function to use.

val linear_nobias : - ?name:string -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int -> - node -> - node

Similar to linear, but does not use the bias vector.

val embedding : - ?name:string -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int -> - int -> - node -> - node

Create a node for embedding neuron.

val recurrent : - ?name:string -> - ?init_typ:Neuron.Init.typ -> - act_typ:Neuron.Activation.typ -> - int -> - int -> - node -> - node

Create a node for recurrent neuron.

val lstm : ?name:string -> ?init_typ:Neuron.Init.typ -> int -> node -> node

lstm units node adds a LSTM node on previous node.

Arguments: * units: Positive integer, dimensionality of the output space.

val gru : ?name:string -> ?init_typ:Neuron.Init.typ -> int -> node -> node

gru units node adds a Gated Recurrent Unit node on previous node.

Arguments: * units: Positive integer, dimensionality of the output space.

val conv1d : - ?name:string -> - ?padding:Owl_types.padding -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node

conv1d kernel stride node adds a 1D convolution node (e.g. temporal convolution) on previous node.

Arguments: * kernel: int array consists of h, i, o. h specifies the dimension of the 1D convolution window. i and o are the dimensionalities of the input and output space. * stride: int array of 1 integer.

val conv2d : - ?name:string -> - ?padding:Owl_types.padding -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node

conv2d kernel stride node adds a 2D convolution node (e.g. spatial convolution over images) on previous node.

Arguments: * kernel: int array consists of w, h, i, o. w and h specify the width and height of the 2D convolution window. i and o are the dimensionality of the input and output space. * stride: int array of 2 integers.
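
As a sketch of how these builders compose into a small network (assuming the single-precision instantiation of this signature, e.g. Owl.Neural.S.Graph from the owl package):

  open Owl.Neural.S.Graph
  let nn =
    input [|28; 28; 1|]
    |> conv2d [|5; 5; 1; 32|] [|1; 1|] ~act_typ:Neuron.Activation.Relu
    |> max_pool2d [|2; 2|] [|2; 2|]
    |> fully_connected 128 ~act_typ:Neuron.Activation.Relu
    |> linear 10 ~act_typ:(Neuron.Activation.Softmax 1)
    |> get_network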

val conv3d : - ?name:string -> - ?padding:Owl_types.padding -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node

conv3d kernel stride node adds a 3D convolution node (e.g. spatial convolution over volumes) on previous node.

Arguments: * kernel: int array consists of w, h, d, i, o. w, h, and d specify the 3 dimensionality of the 3D convolution window. i and o are the dimensionality of the input and output space. * stride: int array of 3 integers.

val dilated_conv1d : - ?name:string -> - ?padding:Owl_types.padding -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - int array -> - node -> - node

dilated_conv1d kernel stride rate node adds a 1D dilated convolution node (e.g. temporal convolution) on previous node.

Arguments: * kernel: int array consists of h, i, o. h specifies the dimension of the 1D convolution window. i and o are the dimensionalities of the input and output space. * stride: int array of 1 integer. * rate: int array of 1 integer.

val dilated_conv2d : - ?name:string -> - ?padding:Owl_types.padding -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - int array -> - node -> - node

dilated_conv2d kernel stride rate node adds a 2D dilated convolution node (e.g. spatial convolution over images) on previous node.

Arguments: * kernel: int array consists of w, h, i, o. w and h specify the width and height of the 2D convolution window. i and o are the dimensionality of the input and output space. * stride: int array of 2 integers. * rate: int array of 2 integers.

val dilated_conv3d : - ?name:string -> - ?padding:Owl_types.padding -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - int array -> - node -> - node

dilated_conv3d kernel stride rate node adds a 3D dilated convolution node (e.g. spatial convolution over volumes) on previous node.

Arguments: * kernel: int array consists of w, h, d, i, o. w, h, and d specify the 3 dimensionality of the 3D convolution window. i and o are the dimensionality of the input and output space. * stride: int array of 3 integers. * rate: int array of 3 integers.

val transpose_conv1d : - ?name:string -> - ?padding:Owl_types.padding -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node

transpose_conv1d kernel stride node adds a 1D transpose convolution node (e.g. temporal convolution) on previous node.

Arguments: * kernel: int array consists of h, i, o. h specifies the dimension of the 1D convolution window. i and o are the dimensionalities of the input and output space. * stride: int array of 1 integer.

val transpose_conv2d : - ?name:string -> - ?padding:Owl_types.padding -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node

transpose_conv2d kernel stride node adds a 2D transpose convolution node on previous node.

Arguments: * kernel: int array consists of w, h, i, o. w and h specify the width and height of the 2D convolution window. i and o are the dimensionality of the input and output space. * stride: int array of 2 integers.

val transpose_conv3d : - ?name:string -> - ?padding:Owl_types.padding -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node

transpose_conv3d kernel stride node adds a 3D transpose convolution node (e.g. spatial convolution over volumes) on previous node.

Arguments: * kernel: int array consists of w, h, d, i, o. w, h, and d specify the 3 dimensionality of the 3D convolution window. i and o are the dimensionality of the input and output space. * stride: int array of 3 integers.

val fully_connected : - ?name:string -> - ?init_typ:Neuron.Init.typ -> - ?act_typ:Neuron.Activation.typ -> - int -> - node -> - node

fully_connected outputs node adds a fully connected node to node.

Arguments: * outputs: integer, the number of output units in the node.

val max_pool1d : - ?name:string -> - ?padding:Owl_types.padding -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node

max_pool1d ~padding ~act_typ pool_size stride node adds a max pooling operation for temporal data to node.

Arguments: * pool_size: Array of one integer, size of the max pooling windows. * stride: Array of one integer, factor by which to downscale.

val max_pool2d : - ?name:string -> - ?padding:Owl_types.padding -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node

max_pool2d ~padding ~act_typ pool_size stride node adds a max pooling operation for spatial data to node.

Arguments: * pool_size: Array of 2 integers, size of the max pooling window. * stride: Array of 2 integers, factor by which to downscale.

val avg_pool1d : - ?name:string -> - ?padding:Owl_types.padding -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node

avg_pool1d ~padding ~act_typ pool_size stride node adds an average pooling operation for temporal data to node.

Arguments: * pool_size: Array of one integer, size of the average pooling window. * stride: Array of one integer, factor by which to downscale.

val avg_pool2d : - ?name:string -> - ?padding:Owl_types.padding -> - ?act_typ:Neuron.Activation.typ -> - int array -> - int array -> - node -> - node

avg_pool2d ~padding ~act_typ pool_size stride node adds an average pooling operation for spatial data to node.

Arguments: * pool_size: Array of 2 integers, size of the average pooling window. * stride: Array of 2 integers, factor by which to downscale.
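
A short sketch combining both pooling flavours on an image-shaped node x; SAME and VALID are assumed to be the constructors of Owl_types.padding:

let pooled x =
  x
  |> max_pool2d ~padding:Owl_types.SAME [|2; 2|] [|2; 2|]   (* halve width and height *)
  |> avg_pool2d ~padding:Owl_types.VALID [|2; 2|] [|1; 1|]  (* smooth with a 2x2 window *)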

val global_max_pool1d : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - node -> - node

global_max_pool1d adds a global max pooling operation for temporal data.

val global_max_pool2d : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - node -> - node

global_max_pool2d adds a global max pooling operation for spatial data.

val global_avg_pool1d : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - node -> - node

global_avg_pool1d adds a global average pooling operation for temporal data.

val global_avg_pool2d : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - node -> - node

global_avg_pool2d adds a global average pooling operation for spatial data.

val upsampling2d : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - int array -> - node -> - node

upsampling2d ~act_typ size node adds an upsampling operation for spatial data to node.

Arguments: * size: array of two integers, namely the upsampling factors for columns and rows.

val padding2d : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - int array array -> - node -> - node

padding2d ~act_typ padding node adds rows and columns of zeros at the top, bottom, left and right side of an image tensor.

Arguments: * padding: array of 2 arrays of 2 integers, interpreted as [| [|top_pad; bottom_pad|]; [|left_pad; right_pad|] |].
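
For instance, a sketch that pads one row of zeros at the top and bottom and two columns at the left and right of an image-shaped node x:

let padded x = padding2d [| [|1; 1|]; [|2; 2|] |] x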

val dropout : ?name:string -> float -> node -> node

dropout rate node applies Dropout to the input to prevent overfitting.

Arguments: * rate: float between 0 and 1. Fraction of the input units to drop.
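
A hedged sketch of interleaving dropout with fully connected nodes on an existing node features; the sizes and the 0.5 rate are arbitrary assumptions:

let regularised features =
  features
  |> fully_connected ~act_typ:Neuron.Activation.Relu 256
  |> dropout 0.5            (* drop half of the input units during training *)
  |> fully_connected 10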

val gaussian_noise : ?name:string -> float -> node -> node

gaussian_noise stddev node applies additive zero-centered Gaussian noise.

Arguments: * stddev: float, standard deviation of the noise distribution.

val gaussian_dropout : ?name:string -> float -> node -> node

gaussian_dropout rate node applies multiplicative 1-centered Gaussian noise. Only active at training time.

Arguments: * rate: float, drop probability.

val alpha_dropout : ?name:string -> float -> node -> node

alpha_dropout rate node applies Alpha Dropout to the input node. Only active at training time.

Arguments: * rate: float, drop probability.

val normalisation : - ?name:string -> - ?axis:int -> - ?training:bool -> - ?decay:float -> - ?mu:Neuron.Optimise.Algodiff.A.arr -> - ?var:Neuron.Optimise.Algodiff.A.arr -> - node -> - node

normalisation axis node normalises the activations of the previous node at each batch.

Arguments: * axis: Integer, the axis that should be normalised (typically the features axis). Default value is 0.
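
A minimal sketch, assuming an image-shaped (NHWC) node x, that normalises the channel axis during training:

let bn x = normalisation ~axis:3 ~training:true x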

val reshape : ?name:string -> int array -> node -> node

reshape target_shape node reshapes an output to a certain shape.

Arguments: * target_shape: target shape. Array of integers. Does not include the batch axis.

val flatten : ?name:string -> node -> node

flatten node flattens the input. Does not affect the batch size.

val slice : ?name:string -> int list list -> node -> node

slice node slices the input. Does not affect the batch size.

val lambda : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - ?out_shape:int array -> - (Neuron.Optimise.Algodiff.t -> Neuron.Optimise.Algodiff.t) -> - node -> - node

lambda ?out_shape func node wraps an arbitrary expression as a Node object.

Arguments: * func: The function to be evaluated. Takes the input tensor as its first argument. * out_shape: the shape of the tensor returned by func; set to the same as the input shape if not specified.
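
A minimal sketch that wraps an element-wise function as a node; Maths.sin is assumed to exist in the Algodiff module, and since it preserves the shape the output shape can be left unspecified:

let sin_node x = lambda (fun t -> Neuron.Optimise.Algodiff.Maths.sin t) x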

val lambda_array : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - int array -> - (Neuron.Optimise.Algodiff.t array -> Neuron.Optimise.Algodiff.t) -> - node array -> - node

lambda_array target_shape func node wraps an arbitrary expression as a Node object.

Arguments: * target_shape: the shape of the tensor returned by func. * func: The function to be evaluated. Takes the input tensor array as its first argument.

val add : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node

Node that adds a list of inputs.

It takes as input an array of nodes, all of the same shape, and returns a single node (also of the same shape).

val mul : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node

Node that multiplies (element-wise) a list of inputs.

It takes as input an array of nodes, all of the same shape, and returns a single node (also of the same shape).

val dot : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node

Node that computes a dot product between samples in two nodes.

val max : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node

Node that computes the maximum (element-wise) of a list of inputs.

val average : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - node array -> - node

Node that averages a list of inputs.

It takes as input an array of nodes, all of the same shape, and returns a single node (also of the same shape).

val concatenate : - ?name:string -> - ?act_typ:Neuron.Activation.typ -> - int -> - node array -> - node

concatenate axis nodes concatenates an array of nodes and returns them as a single node.

Arguments: * axis: Axis along which to concatenate.
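
A sketch of merging two branches a and b of the same shape: sum them element-wise, then concatenate the sum with b along a hypothetical channel axis 3:

let merged a b =
  let s = add [| a; b |] in
  concatenate 3 [| s; b |]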

Helper functions
val to_string : network -> string

Convert a neural network to its string representation.

val pp_network : Stdlib.Format.formatter -> network -> unit

Pretty-printing function for a neural network.

val print : network -> unit

Print the string representation of a neural network to the standard output.

val save : ?unsafe:bool -> network -> string -> unit

Serialise a network and save it to a file with the given name. Set the unsafe flag to true if the network contains a Lambda layer.

val load : string -> network

Load the neural network from a file with the given name.

val save_weights : network -> string -> unit

Save all the weights in a neural network to a file. The weights and the name of their associated neurons are saved as key-value pairs in a hash table.

val load_weights : network -> string -> unit

Load the weights from a file with the given name. Note that the weights and the names of their associated neurons are saved as key-value pairs in a hash table.
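
A hedged sketch of a save/restore round trip for a trained network; the file names are placeholders:

let checkpoint nn =
  save ~unsafe:false nn "model.network";   (* network structure *)
  save_weights nn "model.weights"          (* weights keyed by neuron name *)

let restore () =
  let nn' = load "model.network" in
  load_weights nn' "model.weights";
  nn'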

val make_subnetwork : - ?copy:bool -> - ?make_inputs:string array -> - network -> - string array -> - network

make_subnetwork ?copy ?make_inputs network output_names constructs a subnetwork of the nodes on which output_names depend, replacing nodes with names in make_inputs with input nodes.

Arguments: * copy: Whether to copy or reference the original node weights. Defaults to true. * make_inputs: Names of nodes to use as inputs to the subnetwork. Defaults to [||], which uses the original inputs. * nn: The neural network from which the subnetwork is constructed. * output_names: Names of nodes to use as outputs.
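
A sketch with hypothetical node names: extract everything the node "fc2" depends on, turning the node named "pool1" into a fresh input of the subnetwork:

let extract_head nn =
  make_subnetwork ~copy:true ~make_inputs:[| "pool1" |] nn [| "fc2" |]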

Train Networks

Generic function for training a neural network.

Train a neural network with various configurations.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/.dummy b/owl-base/Owl_neural_neuron/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_neural_neuron/Make/Activation/index.html b/owl-base/Owl_neural_neuron/Make/Activation/index.html deleted file mode 100644 index 62f2bd3f6..000000000 --- a/owl-base/Owl_neural_neuron/Make/Activation/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Activation (owl-base.Owl_neural_neuron.Make.Activation)

Module Make.Activation

type typ =
  1. | Elu
  2. | Relu
  3. | Sigmoid
  4. | HardSigmoid
  5. | Softmax of int
  6. | Softplus
  7. | Softsign
  8. | Tanh
  9. | Relu6
  10. | LeakyRelu of float
  11. | TRelu of float
  12. | Custom of Optimise.Algodiff.t -> Optimise.Algodiff.t
  13. | None
type neuron_typ = {
  1. mutable activation : typ;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val run_activation : Optimise.Algodiff.t -> typ -> Optimise.Algodiff.t
val copy : neuron_typ -> neuron_typ
val activation_to_string : typ -> string
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Add/index.html b/owl-base/Owl_neural_neuron/Make/Add/index.html deleted file mode 100644 index b56e79ad5..000000000 --- a/owl-base/Owl_neural_neuron/Make/Add/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Add (owl-base.Owl_neural_neuron.Make.Add)

Module Make.Add

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/AlphaDropout/index.html b/owl-base/Owl_neural_neuron/Make/AlphaDropout/index.html deleted file mode 100644 index dc47e13e2..000000000 --- a/owl-base/Owl_neural_neuron/Make/AlphaDropout/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -AlphaDropout (owl-base.Owl_neural_neuron.Make.AlphaDropout)

Module Make.AlphaDropout

type neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Average/index.html b/owl-base/Owl_neural_neuron/Make/Average/index.html deleted file mode 100644 index 979a4ee61..000000000 --- a/owl-base/Owl_neural_neuron/Make/Average/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Average (owl-base.Owl_neural_neuron.Make.Average)

Module Make.Average

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/AvgPool1D/index.html b/owl-base/Owl_neural_neuron/Make/AvgPool1D/index.html deleted file mode 100644 index 9aca1922f..000000000 --- a/owl-base/Owl_neural_neuron/Make/AvgPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -AvgPool1D (owl-base.Owl_neural_neuron.Make.AvgPool1D)

Module Make.AvgPool1D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/AvgPool2D/index.html b/owl-base/Owl_neural_neuron/Make/AvgPool2D/index.html deleted file mode 100644 index bb8bf5ee1..000000000 --- a/owl-base/Owl_neural_neuron/Make/AvgPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -AvgPool2D (owl-base.Owl_neural_neuron.Make.AvgPool2D)

Module Make.AvgPool2D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Concatenate/index.html b/owl-base/Owl_neural_neuron/Make/Concatenate/index.html deleted file mode 100644 index 6a390594b..000000000 --- a/owl-base/Owl_neural_neuron/Make/Concatenate/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Concatenate (owl-base.Owl_neural_neuron.Make.Concatenate)

Module Make.Concatenate

type neuron_typ = {
  1. mutable axis : int;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Conv1D/index.html b/owl-base/Owl_neural_neuron/Make/Conv1D/index.html deleted file mode 100644 index f12dfcd94..000000000 --- a/owl-base/Owl_neural_neuron/Make/Conv1D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Conv1D (owl-base.Owl_neural_neuron.Make.Conv1D)

Module Make.Conv1D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Conv2D/index.html b/owl-base/Owl_neural_neuron/Make/Conv2D/index.html deleted file mode 100644 index 6dbef2df9..000000000 --- a/owl-base/Owl_neural_neuron/Make/Conv2D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Conv2D (owl-base.Owl_neural_neuron.Make.Conv2D)

Module Make.Conv2D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Conv3D/index.html b/owl-base/Owl_neural_neuron/Make/Conv3D/index.html deleted file mode 100644 index cf44c6cf4..000000000 --- a/owl-base/Owl_neural_neuron/Make/Conv3D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Conv3D (owl-base.Owl_neural_neuron.Make.Conv3D)

Module Make.Conv3D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/DilatedConv1D/index.html b/owl-base/Owl_neural_neuron/Make/DilatedConv1D/index.html deleted file mode 100644 index e3bf5d7d3..000000000 --- a/owl-base/Owl_neural_neuron/Make/DilatedConv1D/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -DilatedConv1D (owl-base.Owl_neural_neuron.Make.DilatedConv1D)

Module Make.DilatedConv1D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - int array -> - Init.typ -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/DilatedConv2D/index.html b/owl-base/Owl_neural_neuron/Make/DilatedConv2D/index.html deleted file mode 100644 index d31462ec3..000000000 --- a/owl-base/Owl_neural_neuron/Make/DilatedConv2D/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -DilatedConv2D (owl-base.Owl_neural_neuron.Make.DilatedConv2D)

Module Make.DilatedConv2D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - int array -> - Init.typ -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/DilatedConv3D/index.html b/owl-base/Owl_neural_neuron/Make/DilatedConv3D/index.html deleted file mode 100644 index 725fca665..000000000 --- a/owl-base/Owl_neural_neuron/Make/DilatedConv3D/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -DilatedConv3D (owl-base.Owl_neural_neuron.Make.DilatedConv3D)

Module Make.DilatedConv3D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - int array -> - Init.typ -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Dot/index.html b/owl-base/Owl_neural_neuron/Make/Dot/index.html deleted file mode 100644 index 953b956bc..000000000 --- a/owl-base/Owl_neural_neuron/Make/Dot/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Dot (owl-base.Owl_neural_neuron.Make.Dot)

Module Make.Dot

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Dropout/index.html b/owl-base/Owl_neural_neuron/Make/Dropout/index.html deleted file mode 100644 index 5988b2092..000000000 --- a/owl-base/Owl_neural_neuron/Make/Dropout/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Dropout (owl-base.Owl_neural_neuron.Make.Dropout)

Module Make.Dropout

type neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Embedding/index.html b/owl-base/Owl_neural_neuron/Make/Embedding/index.html deleted file mode 100644 index 066e95a9f..000000000 --- a/owl-base/Owl_neural_neuron/Make/Embedding/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Embedding (owl-base.Owl_neural_neuron.Make.Embedding)

Module Make.Embedding

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_dim : int;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Flatten/index.html b/owl-base/Owl_neural_neuron/Make/Flatten/index.html deleted file mode 100644 index d29b1e099..000000000 --- a/owl-base/Owl_neural_neuron/Make/Flatten/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Flatten (owl-base.Owl_neural_neuron.Make.Flatten)

Module Make.Flatten

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/FullyConnected/index.html b/owl-base/Owl_neural_neuron/Make/FullyConnected/index.html deleted file mode 100644 index bf614ff81..000000000 --- a/owl-base/Owl_neural_neuron/Make/FullyConnected/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -FullyConnected (owl-base.Owl_neural_neuron.Make.FullyConnected)

Module Make.FullyConnected

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/GRU/index.html b/owl-base/Owl_neural_neuron/Make/GRU/index.html deleted file mode 100644 index 5991893f8..000000000 --- a/owl-base/Owl_neural_neuron/Make/GRU/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GRU (owl-base.Owl_neural_neuron.Make.GRU)

Module Make.GRU

type neuron_typ = {
  1. mutable wxz : Optimise.Algodiff.t;
  2. mutable whz : Optimise.Algodiff.t;
  3. mutable wxr : Optimise.Algodiff.t;
  4. mutable whr : Optimise.Algodiff.t;
  5. mutable wxh : Optimise.Algodiff.t;
  6. mutable whh : Optimise.Algodiff.t;
  7. mutable bz : Optimise.Algodiff.t;
  8. mutable br : Optimise.Algodiff.t;
  9. mutable bh : Optimise.Algodiff.t;
  10. mutable h : Optimise.Algodiff.t;
  11. mutable init_typ : Init.typ;
  12. mutable in_shape : int array;
  13. mutable out_shape : int array;
}
val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/GaussianDropout/index.html b/owl-base/Owl_neural_neuron/Make/GaussianDropout/index.html deleted file mode 100644 index 728513fc9..000000000 --- a/owl-base/Owl_neural_neuron/Make/GaussianDropout/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GaussianDropout (owl-base.Owl_neural_neuron.Make.GaussianDropout)

Module Make.GaussianDropout

type neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/GaussianNoise/index.html b/owl-base/Owl_neural_neuron/Make/GaussianNoise/index.html deleted file mode 100644 index 01a72f180..000000000 --- a/owl-base/Owl_neural_neuron/Make/GaussianNoise/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GaussianNoise (owl-base.Owl_neural_neuron.Make.GaussianNoise)

Module Make.GaussianNoise

type neuron_typ = {
  1. mutable sigma : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/GlobalAvgPool1D/index.html b/owl-base/Owl_neural_neuron/Make/GlobalAvgPool1D/index.html deleted file mode 100644 index 05147fef7..000000000 --- a/owl-base/Owl_neural_neuron/Make/GlobalAvgPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalAvgPool1D (owl-base.Owl_neural_neuron.Make.GlobalAvgPool1D)

Module Make.GlobalAvgPool1D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/GlobalAvgPool2D/index.html b/owl-base/Owl_neural_neuron/Make/GlobalAvgPool2D/index.html deleted file mode 100644 index 091f2518f..000000000 --- a/owl-base/Owl_neural_neuron/Make/GlobalAvgPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalAvgPool2D (owl-base.Owl_neural_neuron.Make.GlobalAvgPool2D)

Module Make.GlobalAvgPool2D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/GlobalMaxPool1D/index.html b/owl-base/Owl_neural_neuron/Make/GlobalMaxPool1D/index.html deleted file mode 100644 index c6a58230f..000000000 --- a/owl-base/Owl_neural_neuron/Make/GlobalMaxPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalMaxPool1D (owl-base.Owl_neural_neuron.Make.GlobalMaxPool1D)

Module Make.GlobalMaxPool1D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/GlobalMaxPool2D/index.html b/owl-base/Owl_neural_neuron/Make/GlobalMaxPool2D/index.html deleted file mode 100644 index 0d3d42bd3..000000000 --- a/owl-base/Owl_neural_neuron/Make/GlobalMaxPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalMaxPool2D (owl-base.Owl_neural_neuron.Make.GlobalMaxPool2D)

Module Make.GlobalMaxPool2D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Init/index.html b/owl-base/Owl_neural_neuron/Make/Init/index.html deleted file mode 100644 index 6ffcf7061..000000000 --- a/owl-base/Owl_neural_neuron/Make/Init/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Init (owl-base.Owl_neural_neuron.Make.Init)

Module Make.Init

type typ =
  1. | Uniform of float * float
  2. | Gaussian of float * float
  3. | Standard
  4. | Tanh
  5. | GlorotNormal
  6. | GlorotUniform
  7. | LecunNormal
  8. | HeNormal
  9. | Custom of int array -> Optimise.Algodiff.t
val calc_fans : int array -> float * float
val run : typ -> int array -> Optimise.Algodiff.t -> Optimise.Algodiff.t
val to_string : typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Input/index.html b/owl-base/Owl_neural_neuron/Make/Input/index.html deleted file mode 100644 index aa30baf14..000000000 --- a/owl-base/Owl_neural_neuron/Make/Input/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Input (owl-base.Owl_neural_neuron.Make.Input)

Module Make.Input

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : int array -> neuron_typ
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/LSTM/index.html b/owl-base/Owl_neural_neuron/Make/LSTM/index.html deleted file mode 100644 index cdb6a04d4..000000000 --- a/owl-base/Owl_neural_neuron/Make/LSTM/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -LSTM (owl-base.Owl_neural_neuron.Make.LSTM)

Module Make.LSTM

type neuron_typ = {
  1. mutable wxi : Optimise.Algodiff.t;
  2. mutable whi : Optimise.Algodiff.t;
  3. mutable wxc : Optimise.Algodiff.t;
  4. mutable whc : Optimise.Algodiff.t;
  5. mutable wxf : Optimise.Algodiff.t;
  6. mutable whf : Optimise.Algodiff.t;
  7. mutable wxo : Optimise.Algodiff.t;
  8. mutable who : Optimise.Algodiff.t;
  9. mutable bi : Optimise.Algodiff.t;
  10. mutable bc : Optimise.Algodiff.t;
  11. mutable bf : Optimise.Algodiff.t;
  12. mutable bo : Optimise.Algodiff.t;
  13. mutable c : Optimise.Algodiff.t;
  14. mutable h : Optimise.Algodiff.t;
  15. mutable init_typ : Init.typ;
  16. mutable in_shape : int array;
  17. mutable out_shape : int array;
}
val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Lambda/index.html b/owl-base/Owl_neural_neuron/Make/Lambda/index.html deleted file mode 100644 index cf8b7ce2a..000000000 --- a/owl-base/Owl_neural_neuron/Make/Lambda/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Lambda (owl-base.Owl_neural_neuron.Make.Lambda)

Module Make.Lambda

type neuron_typ = {
  1. mutable lambda : Optimise.Algodiff.t -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : - ?out_shape:int array -> - (Optimise.Algodiff.t -> Optimise.Algodiff.t) -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/LambdaArray/index.html b/owl-base/Owl_neural_neuron/Make/LambdaArray/index.html deleted file mode 100644 index 68357cacd..000000000 --- a/owl-base/Owl_neural_neuron/Make/LambdaArray/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -LambdaArray (owl-base.Owl_neural_neuron.Make.LambdaArray)

Module Make.LambdaArray

type neuron_typ = {
  1. mutable lambda : Optimise.Algodiff.t array -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : - int array -> - (Optimise.Algodiff.t array -> Optimise.Algodiff.t) -> - neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Linear/index.html b/owl-base/Owl_neural_neuron/Make/Linear/index.html deleted file mode 100644 index 7d2dad15e..000000000 --- a/owl-base/Owl_neural_neuron/Make/Linear/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Linear (owl-base.Owl_neural_neuron.Make.Linear)

Module Make.Linear

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/LinearNoBias/index.html b/owl-base/Owl_neural_neuron/Make/LinearNoBias/index.html deleted file mode 100644 index b1efbc902..000000000 --- a/owl-base/Owl_neural_neuron/Make/LinearNoBias/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -LinearNoBias (owl-base.Owl_neural_neuron.Make.LinearNoBias)

Module Make.LinearNoBias

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_shape : int array;
  4. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Masking/index.html b/owl-base/Owl_neural_neuron/Make/Masking/index.html deleted file mode 100644 index dc7b6116b..000000000 --- a/owl-base/Owl_neural_neuron/Make/Masking/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Masking (owl-base.Owl_neural_neuron.Make.Masking)

Module Make.Masking

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Max/index.html b/owl-base/Owl_neural_neuron/Make/Max/index.html deleted file mode 100644 index b70bba852..000000000 --- a/owl-base/Owl_neural_neuron/Make/Max/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Max (owl-base.Owl_neural_neuron.Make.Max)

Module Make.Max

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/MaxPool1D/index.html b/owl-base/Owl_neural_neuron/Make/MaxPool1D/index.html deleted file mode 100644 index 779d6aac6..000000000 --- a/owl-base/Owl_neural_neuron/Make/MaxPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -MaxPool1D (owl-base.Owl_neural_neuron.Make.MaxPool1D)

Module Make.MaxPool1D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/MaxPool2D/index.html b/owl-base/Owl_neural_neuron/Make/MaxPool2D/index.html deleted file mode 100644 index d1effea3e..000000000 --- a/owl-base/Owl_neural_neuron/Make/MaxPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -MaxPool2D (owl-base.Owl_neural_neuron.Make.MaxPool2D)

Module Make.MaxPool2D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Mul/index.html b/owl-base/Owl_neural_neuron/Make/Mul/index.html deleted file mode 100644 index cda90601e..000000000 --- a/owl-base/Owl_neural_neuron/Make/Mul/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mul (owl-base.Owl_neural_neuron.Make.Mul)

Module Make.Mul

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Normalisation/index.html b/owl-base/Owl_neural_neuron/Make/Normalisation/index.html deleted file mode 100644 index 7e2add471..000000000 --- a/owl-base/Owl_neural_neuron/Make/Normalisation/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Normalisation (owl-base.Owl_neural_neuron.Make.Normalisation)

Module Make.Normalisation

type neuron_typ = {
  1. mutable axis : int;
  2. mutable beta : Optimise.Algodiff.t;
  3. mutable gamma : Optimise.Algodiff.t;
  4. mutable mu : Optimise.Algodiff.t;
  5. mutable var : Optimise.Algodiff.t;
  6. mutable decay : Optimise.Algodiff.t;
  7. mutable training : bool;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : - ?training:bool -> - ?decay:float -> - ?mu:Optimise.Algodiff.A.arr -> - ?var:Optimise.Algodiff.A.arr -> - int -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val load_weights : neuron_typ -> Optimise.Algodiff.t array -> unit
val save_weights : neuron_typ -> Optimise.Algodiff.t array
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Padding1D/index.html b/owl-base/Owl_neural_neuron/Make/Padding1D/index.html deleted file mode 100644 index d8f8a81d9..000000000 --- a/owl-base/Owl_neural_neuron/Make/Padding1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding1D (owl-base.Owl_neural_neuron.Make.Padding1D)

Module Make.Padding1D

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Padding2D/index.html b/owl-base/Owl_neural_neuron/Make/Padding2D/index.html deleted file mode 100644 index a55b6653a..000000000 --- a/owl-base/Owl_neural_neuron/Make/Padding2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding2D (owl-base.Owl_neural_neuron.Make.Padding2D)

Module Make.Padding2D

type neuron_typ = {
  1. mutable padding : int array array;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int array array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Padding3D/index.html b/owl-base/Owl_neural_neuron/Make/Padding3D/index.html deleted file mode 100644 index 48910626c..000000000 --- a/owl-base/Owl_neural_neuron/Make/Padding3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding3D (owl-base.Owl_neural_neuron.Make.Padding3D)

Module Make.Padding3D

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Recurrent/index.html b/owl-base/Owl_neural_neuron/Make/Recurrent/index.html deleted file mode 100644 index 55c3bc827..000000000 --- a/owl-base/Owl_neural_neuron/Make/Recurrent/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -Recurrent (owl-base.Owl_neural_neuron.Make.Recurrent)

Module Make.Recurrent

type neuron_typ = {
  1. mutable whh : Optimise.Algodiff.t;
  2. mutable wxh : Optimise.Algodiff.t;
  3. mutable why : Optimise.Algodiff.t;
  4. mutable bh : Optimise.Algodiff.t;
  5. mutable by : Optimise.Algodiff.t;
  6. mutable h : Optimise.Algodiff.t;
  7. mutable hiddens : int;
  8. mutable act : Activation.typ;
  9. mutable init_typ : Init.typ;
  10. mutable in_shape : int array;
  11. mutable out_shape : int array;
}
val create : - ?time_steps:int -> - ?inputs:int -> - int -> - int -> - Activation.typ -> - Init.typ -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Reshape/index.html b/owl-base/Owl_neural_neuron/Make/Reshape/index.html deleted file mode 100644 index 5cfba87e6..000000000 --- a/owl-base/Owl_neural_neuron/Make/Reshape/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Reshape (owl-base.Owl_neural_neuron.Make.Reshape)

Module Make.Reshape

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : ?inputs:int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/Slice/index.html b/owl-base/Owl_neural_neuron/Make/Slice/index.html deleted file mode 100644 index 364bd326e..000000000 --- a/owl-base/Owl_neural_neuron/Make/Slice/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Slice (owl-base.Owl_neural_neuron.Make.Slice)

Module Make.Slice

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
  3. mutable slice : int list list;
}
val create : int list list -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/TransposeConv1D/index.html b/owl-base/Owl_neural_neuron/Make/TransposeConv1D/index.html deleted file mode 100644 index f26f50a0f..000000000 --- a/owl-base/Owl_neural_neuron/Make/TransposeConv1D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -TransposeConv1D (owl-base.Owl_neural_neuron.Make.TransposeConv1D)

Module Make.TransposeConv1D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/TransposeConv2D/index.html b/owl-base/Owl_neural_neuron/Make/TransposeConv2D/index.html deleted file mode 100644 index da2a7ee7c..000000000 --- a/owl-base/Owl_neural_neuron/Make/TransposeConv2D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -TransposeConv2D (owl-base.Owl_neural_neuron.Make.TransposeConv2D)

Module Make.TransposeConv2D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/TransposeConv3D/index.html b/owl-base/Owl_neural_neuron/Make/TransposeConv3D/index.html deleted file mode 100644 index 99ea523a3..000000000 --- a/owl-base/Owl_neural_neuron/Make/TransposeConv3D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -TransposeConv3D (owl-base.Owl_neural_neuron.Make.TransposeConv3D)

Module Make.TransposeConv3D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/UpSampling1D/index.html b/owl-base/Owl_neural_neuron/Make/UpSampling1D/index.html deleted file mode 100644 index d7df3073f..000000000 --- a/owl-base/Owl_neural_neuron/Make/UpSampling1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling1D (owl-base.Owl_neural_neuron.Make.UpSampling1D)

Module Make.UpSampling1D

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/UpSampling2D/index.html b/owl-base/Owl_neural_neuron/Make/UpSampling2D/index.html deleted file mode 100644 index 174f111a1..000000000 --- a/owl-base/Owl_neural_neuron/Make/UpSampling2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling2D (owl-base.Owl_neural_neuron.Make.UpSampling2D)

Module Make.UpSampling2D

type neuron_typ = {
  1. mutable size : int array;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/UpSampling3D/index.html b/owl-base/Owl_neural_neuron/Make/UpSampling3D/index.html deleted file mode 100644 index 6ad06a771..000000000 --- a/owl-base/Owl_neural_neuron/Make/UpSampling3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling3D (owl-base.Owl_neural_neuron.Make.UpSampling3D)

Module Make.UpSampling3D

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/A/Linalg/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/A/Linalg/index.html deleted file mode 100644 index b7ca0828b..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_neural_neuron.Make.Optimise.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : - ?solver:[ `default | `bilinear | `direct ] -> - arr -> - arr -> - arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/A/Mat/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/A/Mat/index.html deleted file mode 100644 index 0e9a9162d..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_neuron.Make.Optimise.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/A/Scalar/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/A/Scalar/index.html deleted file mode 100644 index 9f9d45789..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_neural_neuron.Make.Optimise.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/A/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/A/index.html deleted file mode 100644 index 05a09dfb1..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_neural_neuron.Make.Optimise.Algodiff.A)

Module Algodiff.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(elt -> string) -> - arr -> - unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Arr/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Arr/index.html deleted file mode 100644 index 88f3e1c2f..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl-base.Owl_neural_neuron.Make.Optimise.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/index.html deleted file mode 100644 index 2a75d4ada..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl-base.Owl_neural_neuron.Make.Optimise.Algodiff.Builder)

Module Algodiff.Builder

Ops Builder
module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t

build single input single output operations

module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t

build single input pair outputs operations

module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t

build single input triple outputs operations

module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array

build single input array output operations

module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t

build pair inputs single output operations

module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t

build array input single output operations
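As an illustration only, a minimal sketch of a custom operation assembled with build_siso. It assumes the code lives inside the Algodiff implementation, so that the F and Arr constructors, the Maths module and the underlying ndarray module A are in scope; the argument conventions of df (df cp ap at) and dr (dr a cp ca) are assumptions to be checked against the Siso signature above.

module Custom_sin = struct
  let label = "custom_sin"
  let ff_f a = F A.Scalar.(sin a)         (* forward pass on a scalar *)
  let ff_arr a = Arr A.(sin a)            (* forward pass on an ndarray *)
  let df _cp ap at = Maths.(at * cos ap)  (* tangent: at * cos x *)
  let dr a _cp ca = Maths.(!ca * cos a)   (* adjoint: !ca * cos x *)
end

let custom_sin = build_siso (module Custom_sin : Siso)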

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Aiso/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Aiso/index.html deleted file mode 100644 index ab0510c01..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl-base.Owl_neural_neuron.Make.Optimise.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Piso/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Piso/index.html deleted file mode 100644 index 79a4966ea..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl-base.Owl_neural_neuron.Make.Optimise.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Siao/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Siao/index.html deleted file mode 100644 index 225ab0e17..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl-base.Owl_neural_neuron.Make.Optimise.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Sipo/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Sipo/index.html deleted file mode 100644 index 2e652368b..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl-base.Owl_neural_neuron.Make.Optimise.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Siso/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Siso/index.html deleted file mode 100644 index f0397bc3b..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl-base.Owl_neural_neuron.Make.Optimise.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Sito/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Sito/index.html deleted file mode 100644 index 76a19e7a8..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl-base.Owl_neural_neuron.Make.Optimise.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Linalg/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Linalg/index.html deleted file mode 100644 index 98e66f7d0..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_neural_neuron.Make.Optimise.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val logdet : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val chol : ?upper:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val qr : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val lq : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val svd : ?thin:bool -> t -> t * t * t

Refer to :doc:`owl_dense_ndarray_generic`

val sylvester : t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val lyapunov : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val care : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dare : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Mat/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Mat/index.html deleted file mode 100644 index f3fa83a7a..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_neuron.Make.Optimise.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Maths/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Maths/index.html deleted file mode 100644 index a63b43334..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl-base.Owl_neural_neuron.Make.Optimise.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (-) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (**) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val add : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sub : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mul : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val div : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val kron : t -> t -> t

Refer to :doc:`owl_dense_matrix_generic`

val dot : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pow : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val min2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cross_entropy : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val neg : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val abs : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val signum : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val floor : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val ceil : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val round : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqr : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqrt : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log2 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log10 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val exp : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum_reduce : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mean : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val swap : int -> int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l1norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm_sqr' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sigmoid : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val relu : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dawsn : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softplus : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softsign : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softmax : ?axis:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val reshape : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val flatten : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_item : t -> int -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_row : t -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val concat : axis:int -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val split : axis:int -> int array -> t -> t array

Refer to :doc:`owl_dense_ndarray_generic`

val of_arrays : t array array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val to_arrays : t -> t array array

Refer to :doc:`owl_dense_ndarray_generic`

val concatenate : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val stack : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_slice : int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_slice : int list list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_fancy : Owl_types.index list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_fancy : Owl_types.index list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diag : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diagm : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val trace : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val triu : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tril : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/NN/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/NN/index.html deleted file mode 100644 index e102042b4..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl-base.Owl_neural_neuron.Make.Optimise.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val upsampling2d : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pad : ?v:A.elt -> int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/index.html deleted file mode 100644 index ba58e7712..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Algodiff/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Algodiff (owl-base.Owl_neural_neuron.Make.Optimise.Algodiff)

Module Optimise.Algodiff

include Owl_algodiff_core_sig.Sig
Type definition
include Owl_algodiff_types_sig.Sig with type elt := A.elt and type arr := A.arr
type t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
Core functions
val tag : unit -> int

TODO

val primal : t -> t

TODO

val primal' : t -> t

TODO

val zero : t -> t

TODO

val reset_zero : t -> t

TODO

val tangent : t -> t

TODO

val adjref : t -> t Stdlib.ref

TODO

val adjval : t -> t

TODO

val shape : t -> int array

TODO

val is_float : t -> bool

TODO

val is_arr : t -> bool

TODO

val row_num : t -> int

number of rows

val col_num : t -> int

number of columns

val numel : t -> int

number of elements

val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t

other functions, without tracking gradient

val clip_by_l2norm : A.elt -> t -> t

other functions, without tracking gradient

val copy_primal' : t -> t

TODO

val tile : t -> int array -> t

TODO

val repeat : t -> int array -> t

TODO

val pack_elt : A.elt -> t

convert from elt type to t type.

val unpack_elt : t -> A.elt

convert from t type to elt type.

val pack_flt : float -> t

convert from float type to t type.

val _f : float -> t

A shortcut function for F A.(float_to_elt x).

val unpack_flt : t -> float

convert from t type to float type.

val pack_arr : A.arr -> t

convert from arr type to t type.

val unpack_arr : t -> A.arr

convert from t type to arr type.

val deep_info : t -> string

TODO

val type_info : t -> string

TODO

val error_binop : string -> t -> t -> 'a

TODO

val error_uniop : string -> t -> 'a

TODO

val make_forward : t -> t -> int -> t

TODO

val make_reverse : t -> int -> t

TODO

val reverse_prop : t -> t -> unit

TODO

val diff : (t -> t) -> t -> t

diff f x returns the exact derivative of a function f : scalar -> scalar at point x. Simply calling diff f returns its derivative function g of the same type, i.e. g : scalar -> scalar.

Calling this function repeatedly gives you higher-order derivatives of f, i.e. f |> diff |> diff |> diff |> ...
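For illustration, a minimal sketch (assuming an instantiated Algodiff module is open):

(* first and third derivatives of x -> x * sin x, evaluated at x = 1. *)
let f x = Maths.(x * sin x) in
let f' = diff f in                        (* first derivative, scalar -> scalar *)
let f''' = f |> diff |> diff |> diff in   (* third derivative *)
unpack_flt (f' (pack_flt 1.)), unpack_flt (f''' (pack_flt 1.))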

val diff' : (t -> t) -> t -> t * t

similar to diff, but return (f x, diff f x).

val grad : (t -> t) -> t -> t

gradient of f : (vector -> scalar) at x, reverse ad.

val grad' : (t -> t) -> t -> t * t

similar to grad, but return (f x, grad f x).
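For illustration, a minimal sketch (assuming an instantiated Algodiff module is open):

(* gradient of the squared L2 norm of a 1 x 3 row vector; the result equals 2 * x *)
let f x = Maths.(sum' (x * x)) in
let x = Arr.uniform [| 1; 3 |] in
grad f x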

val jacobian : (t -> t) -> t -> t

jacobian of f : (vector -> vector) at x, where both x and y are row vectors.

val jacobian' : (t -> t) -> t -> t * t

similar to jacobian, but return (f x, jacobian f x)

val jacobianv : (t -> t) -> t -> t -> t

jacobian vector product of f : (vector -> vector) at x along v, forward ad. Namely, it calculates (jacobian f x) v.

val jacobianv' : (t -> t) -> t -> t -> t * t

similar to jacobianv, but return (f x, jacobianv f x v)

val jacobianTv : (t -> t) -> t -> t -> t

transposed jacobian vector product of f : (vector -> vector) at x along v, backward ad. Namely, it calculates (transpose (jacobian f x)) v.

val jacobianTv' : (t -> t) -> t -> t -> t * t

similar to jacobianTv, but return (f x, jacobianTv f x v)

val hessian : (t -> t) -> t -> t

hessian of f : (scalar -> scalar) at x.

val hessian' : (t -> t) -> t -> t * t

similar to hessian, but return (f x, hessian f x)

val hessianv : (t -> t) -> t -> t -> t

hessian vector product of f : (scalar -> scalar) at x along v. Namely, it calculates (hessian f x) v.

val hessianv' : (t -> t) -> t -> t -> t * t

similar to hessianv, but return (f x, hessianv f x v).

val laplacian : (t -> t) -> t -> t

laplacian of f : (scalar -> scalar) at x.

val laplacian' : (t -> t) -> t -> t * t

similar to laplacian, but return (f x, laplacian f x).

val gradhessian : (t -> t) -> t -> t * t

return (grad f x, hessian f x), f : (scalar -> scalar)

val gradhessian' : (t -> t) -> t -> t * t * t

return (f x, grad f x, hessian f x)

val gradhessianv : (t -> t) -> t -> t -> t * t

return (grad f x v, hessian f x v)

val gradhessianv' : (t -> t) -> t -> t -> t * t * t

return (f x, grad f x v, hessian f x v)

include Owl_algodiff_ops_sig.Sig with type t := t and type elt := A.elt and type arr := A.arr and type op := op
module Builder : Owl_algodiff_ops_builder_sig.Sig with type t := t and type elt := A.elt and type arr := A.arr and type op := op
Supported Maths functions
module Maths : sig ... end
Supported Linalg functions
module Linalg : sig ... end
Supported Neural Network functions
module NN : sig ... end
Supported Mat functions
module Mat : sig ... end
Supported Arr functions
module Arr : sig ... end
Helper functions
include Owl_algodiff_graph_convert_sig.Sig with type t := t
val to_trace : t list -> string

to_trace [t0; t1; ...] outputs the trace of the computation graph to the terminal in a human-readable format.

val to_dot : t list -> string

to_dot [t0; t1; ...] outputs the trace of the computation graph in the dot file format, which you can feed to other tools such as Graphviz for further visualisation.
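For illustration, a minimal sketch of dumping the graph rooted at a value y to a dot file (y is assumed to be an Algodiff value from an earlier computation; the file name is arbitrary):

let oc = open_out "cgraph.dot" in
output_string oc (to_dot [ y ]);
close_out oc
(* then render it with, e.g.: dot -Tpdf cgraph.dot -o cgraph.pdf *)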

val pp_num : Stdlib.Format.formatter -> t -> unit

pp_num t pretty prints the abstract number used in Algodiff.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Batch/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Batch/index.html deleted file mode 100644 index 7fc2e10b6..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Batch/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Batch (owl-base.Owl_neural_neuron.Make.Optimise.Batch)

Module Optimise.Batch

Batch module

type typ =
  1. | Full
  2. | Mini of int
  3. | Sample of int
  4. | Stochastic

Types of batches.

val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t

Execute the computations defined in module typ.

val batches : typ -> Algodiff.t -> int

Return the total number of batches given a batch typ.
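For example, a sketch of counting the mini-batches needed for one pass over the observations x (x is assumed to hold samples along axis 0):

let n_batches = Batch.batches (Batch.Mini 128) x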

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Checkpoint/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Checkpoint/index.html deleted file mode 100644 index 05f241981..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Checkpoint/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Checkpoint (owl-base.Owl_neural_neuron.Make.Optimise.Checkpoint)

Module Optimise.Checkpoint

Checkpoint module

type state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}

Type definition of checkpoint

type typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of state -> unit
  4. | None

Batch type.

val init_state : int -> float -> state

init_state batches_per_epoch epochs initialises a state by specifying the number of batches per epoch and the number of epochs in total.
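For example (a sketch): a fresh state for 100 batches per epoch and 50 epochs in total:

let state = Checkpoint.init_state 100 50.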

val default_checkpoint_fun : (string -> 'a) -> 'a

This function is used for saving intermediate files during optimisation.

val print_state_info : state -> unit

Print out the detailed information of the current state.

val print_summary : state -> unit

Print out the summary of current state.

val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Clipping/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Clipping/index.html deleted file mode 100644 index c2991f81a..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Clipping/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Clipping (owl-base.Owl_neural_neuron.Make.Optimise.Clipping)

Module Optimise.Clipping

Clipping module

type typ =
  1. | L2norm of float
  2. | Value of float * float
  3. | None

Types of clipping functions.

val run : typ -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Gradient/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Gradient/index.html deleted file mode 100644 index ecc50a311..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Gradient/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -Gradient (owl-base.Owl_neural_neuron.Make.Optimise.Gradient)

Module Optimise.Gradient

Gradient module

type typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton

Types of gradient function.

val run : typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Learning_Rate/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Learning_Rate/index.html deleted file mode 100644 index 161c38258..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Learning_Rate/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Learning_Rate (owl-base.Owl_neural_neuron.Make.Optimise.Learning_Rate)

Module Optimise.Learning_Rate

Strategies for learning rate update

type typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array

Representation of learning rate update strategies. Possible values include:

  • Adam (alpha, beta1, beta2), see ref for parameter meaning
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array

Update the cache of gradients.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Loss/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Loss/index.html deleted file mode 100644 index 4b7c207ca..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Loss/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Loss (owl-base.Owl_neural_neuron.Make.Optimise.Loss)

Module Optimise.Loss

Loss module

type typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of Algodiff.t -> Algodiff.t -> Algodiff.t

Types of loss functions.

val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Momentum/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Momentum/index.html deleted file mode 100644 index 957253f7f..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Momentum/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Momentum (owl-base.Owl_neural_neuron.Make.Optimise.Momentum)

Module Optimise.Momentum

Momentum module

type typ =
  1. | Standard of float
  2. | Nesterov of float
  3. | None

Types of momentum functions.

val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Params/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Params/index.html deleted file mode 100644 index ad912f479..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Params/index.html +++ /dev/null @@ -1,14 +0,0 @@ - -Params (owl-base.Owl_neural_neuron.Make.Optimise.Params)

Module Optimise.Params

Params module

type typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}

Type definition of parameter.

val default : unit -> typ

Create module typ with default values.

val config : ?batch:Batch.typ -> ?gradient:Gradient.typ -> ?loss:Loss.typ -> ?learning_rate:Learning_Rate.typ -> ?regularisation:Regularisation.typ -> ?momentum:Momentum.typ -> ?clipping:Clipping.typ -> ?stopping:Stopping.typ -> ?checkpoint:Checkpoint.typ -> ?verbosity:bool -> float -> typ

This function creates a parameter object with many configurations.
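For illustration, a minimal sketch built only from the constructors listed in this documentation; the trailing float is assumed to be the total number of epochs (cf. the epochs field of Params.typ):

let params =
  Params.config
    ~batch:(Batch.Mini 100)
    ~learning_rate:(Learning_Rate.Adagrad 0.005)
    ~loss:Loss.Cross_entropy
    ~stopping:(Stopping.Const 1e-6)
    10.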

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Regularisation/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Regularisation/index.html deleted file mode 100644 index 657d11ca6..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Regularisation/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Regularisation (owl-base.Owl_neural_neuron.Make.Optimise.Regularisation)

Module Optimise.Regularisation

Regularisation module

type typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None

Types of regularisation functions.

val run : typ -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Stopping/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Stopping/index.html deleted file mode 100644 index 4d30f0e1d..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Stopping/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Stopping (owl-base.Owl_neural_neuron.Make.Optimise.Stopping)

Module Optimise.Stopping

Stopping module

type typ =
  1. | Const of float
  2. | Early of int * int
  3. | None

Types of stopping functions.

val run : typ -> float -> bool

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Utils/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Utils/index.html deleted file mode 100644 index da72b64af..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl-base.Owl_neural_neuron.Make.Optimise.Utils)

Module Optimise.Utils

Utils module

val sample_num : Algodiff.t -> int

Return the total number of samples in the passed-in ndarray.

val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t

draw_samples x y draws samples from both x (observations) and y (labels). The samples are drawn along axis 0, so x and y must agree along axis 0.

val get_chunk : Algodiff.t -> Algodiff.t -> int -> int -> Algodiff.t * Algodiff.t

get_chunk x y i c gets a contiguous chunk of c samples starting at position i from x (observations) and y (labels).
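For illustration (a sketch; x and y are assumed to agree along axis 0):

let xb, yb = Utils.draw_samples x y 64   (* a random mini-batch of 64 samples *)
let xc, yc = Utils.get_chunk x y 0 128   (* the first 128 samples as one contiguous chunk *)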

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/index.html b/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/index.html deleted file mode 100644 index 363bc8125..000000000 --- a/owl-base/Owl_neural_neuron/Make/argument-1-Optimise/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -Optimise (owl-base.Owl_neural_neuron.Make.Optimise)

Parameter Make.Optimise

module Utils : sig ... end

Utils module

module Learning_Rate : sig ... end

Strategies for learning rate update

module Batch : sig ... end

Batch module

module Loss : sig ... end

Loss module

module Gradient : sig ... end

Gradient module

module Momentum : sig ... end

Momentum module

module Regularisation : sig ... end

Regularisation module

module Clipping : sig ... end

Clipping module

module Stopping : sig ... end

Stopping module

module Checkpoint : sig ... end

Checkpoint module

module Params : sig ... end

Params module

Core functions
val minimise_weight : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Checkpoint.state * Algodiff.t

This function minimises the weight w of the passed-in function f.

f is a function f : w -> x -> y, where w is a row vector and y can have any shape.

val minimise_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> (Algodiff.t array array -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state

This function is specifically designed for minimising the weights in a neural network of graph structure. In Owl's earlier versions, the functions in the regression module were actually implemented using this function.

val minimise_fun : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Checkpoint.state * Algodiff.t

This function minimises f : x -> y w.r.t x.

x is an ndarray, and y is a scalar value.
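For illustration, a minimal sketch minimising (x - 2)^2 summed over a small ndarray, using only default parameters (assuming this Optimise module and its Algodiff submodule are in scope):

let two = Algodiff.pack_flt 2. in
let f x = Algodiff.Maths.(sum' ((x - two) * (x - two))) in
let x0 = Algodiff.Arr.uniform [| 1; 3 |] in
let _state, x_star = minimise_fun (Params.default ()) f x0 in
x_star   (* should end up close to an ndarray filled with 2s *)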

val minimise_compiled_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> (unit -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state

TODO

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron/Make/index.html b/owl-base/Owl_neural_neuron/Make/index.html deleted file mode 100644 index 129846b59..000000000 --- a/owl-base/Owl_neural_neuron/Make/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Make (owl-base.Owl_neural_neuron.Make)

Module Owl_neural_neuron.Make

Parameters

Signature

module Optimise = Optimise
module Init : sig ... end
module Input : sig ... end
module Activation : sig ... end
module Linear : sig ... end
module LinearNoBias : sig ... end
module Recurrent : sig ... end
module LSTM : sig ... end
module GRU : sig ... end
module Conv1D : sig ... end
module DilatedConv1D : sig ... end
module TransposeConv1D : sig ... end
module Conv2D : sig ... end
module DilatedConv2D : sig ... end
module TransposeConv2D : sig ... end
module Conv3D : sig ... end
module DilatedConv3D : sig ... end
module TransposeConv3D : sig ... end
module FullyConnected : sig ... end
module MaxPool1D : sig ... end
module MaxPool2D : sig ... end
module AvgPool1D : sig ... end
module AvgPool2D : sig ... end
module GlobalMaxPool1D : sig ... end
module GlobalMaxPool2D : sig ... end
module GlobalAvgPool1D : sig ... end
module GlobalAvgPool2D : sig ... end
module UpSampling1D : sig ... end
module UpSampling2D : sig ... end
module UpSampling3D : sig ... end
module Padding1D : sig ... end
module Padding2D : sig ... end
module Padding3D : sig ... end
module Lambda : sig ... end
module LambdaArray : sig ... end
module Dropout : sig ... end
module Reshape : sig ... end
module Flatten : sig ... end
module Slice : sig ... end
module Add : sig ... end
module Mul : sig ... end
module Dot : sig ... end
module Max : sig ... end
module Average : sig ... end
module Concatenate : sig ... end
module Normalisation : sig ... end
module GaussianNoise : sig ... end
module GaussianDropout : sig ... end
module AlphaDropout : sig ... end
module Embedding : sig ... end
module Masking : sig ... end
type neuron =
  1. | Input of Input.neuron_typ
  2. | Linear of Linear.neuron_typ
  3. | LinearNoBias of LinearNoBias.neuron_typ
  4. | Embedding of Embedding.neuron_typ
  5. | LSTM of LSTM.neuron_typ
  6. | GRU of GRU.neuron_typ
  7. | Recurrent of Recurrent.neuron_typ
  8. | Conv1D of Conv1D.neuron_typ
  9. | Conv2D of Conv2D.neuron_typ
  10. | Conv3D of Conv3D.neuron_typ
  11. | DilatedConv1D of DilatedConv1D.neuron_typ
  12. | DilatedConv2D of DilatedConv2D.neuron_typ
  13. | DilatedConv3D of DilatedConv3D.neuron_typ
  14. | TransposeConv1D of TransposeConv1D.neuron_typ
  15. | TransposeConv2D of TransposeConv2D.neuron_typ
  16. | TransposeConv3D of TransposeConv3D.neuron_typ
  17. | FullyConnected of FullyConnected.neuron_typ
  18. | MaxPool1D of MaxPool1D.neuron_typ
  19. | MaxPool2D of MaxPool2D.neuron_typ
  20. | AvgPool1D of AvgPool1D.neuron_typ
  21. | AvgPool2D of AvgPool2D.neuron_typ
  22. | GlobalMaxPool1D of GlobalMaxPool1D.neuron_typ
  23. | GlobalMaxPool2D of GlobalMaxPool2D.neuron_typ
  24. | GlobalAvgPool1D of GlobalAvgPool1D.neuron_typ
  25. | GlobalAvgPool2D of GlobalAvgPool2D.neuron_typ
  26. | UpSampling2D of UpSampling2D.neuron_typ
  27. | Padding2D of Padding2D.neuron_typ
  28. | Dropout of Dropout.neuron_typ
  29. | Reshape of Reshape.neuron_typ
  30. | Flatten of Flatten.neuron_typ
  31. | Slice of Slice.neuron_typ
  32. | Lambda of Lambda.neuron_typ
  33. | LambdaArray of LambdaArray.neuron_typ
  34. | Activation of Activation.neuron_typ
  35. | GaussianNoise of GaussianNoise.neuron_typ
  36. | GaussianDropout of GaussianDropout.neuron_typ
  37. | AlphaDropout of AlphaDropout.neuron_typ
  38. | Normalisation of Normalisation.neuron_typ
  39. | Add of Add.neuron_typ
  40. | Mul of Mul.neuron_typ
  41. | Dot of Dot.neuron_typ
  42. | Max of Max.neuron_typ
  43. | Average of Average.neuron_typ
  44. | Concatenate of Concatenate.neuron_typ
val get_in_out_shape : neuron -> int array * int array
val get_in_shape : neuron -> int array
val get_out_shape : neuron -> int array
val connect : int array array -> neuron -> unit
val init : neuron -> unit
val reset : neuron -> unit
val mktag : int -> neuron -> unit
val mkpar : neuron -> Optimise.Algodiff.t array
val mkpri : neuron -> Optimise.Algodiff.t array
val mkadj : neuron -> Optimise.Algodiff.t array
val update : neuron -> Optimise.Algodiff.t array -> unit
val save_weights : neuron -> Optimise.Algodiff.t array
val load_weights : neuron -> Optimise.Algodiff.t array -> unit
val copy : neuron -> neuron
val to_string : neuron -> string
val to_name : neuron -> string
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/.dummy b/owl-base/Owl_neural_neuron_sig/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Activation/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Activation/index.html deleted file mode 100644 index 9b5739a55..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Activation/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Activation (owl-base.Owl_neural_neuron_sig.Sig.Activation)

Module Sig.Activation

type typ =
  1. | Elu
  2. | Relu
  3. | Sigmoid
  4. | HardSigmoid
  5. | Softmax of int
  6. | Softplus
  7. | Softsign
  8. | Tanh
  9. | Relu6
  10. | LeakyRelu of float
  11. | TRelu of float
  12. | Custom of Optimise.Algodiff.t -> Optimise.Algodiff.t
  13. | None

Types of activation functions.

type neuron_typ = {
  1. mutable activation : typ;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val run_activation : Optimise.Algodiff.t -> typ -> Optimise.Algodiff.t

Run one specific activation function.
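For example (a sketch; the int carried by Softmax is assumed to be the axis along which the softmax is taken):

let y = Activation.run_activation x (Activation.Softmax 1)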

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val activation_to_string : typ -> string

Return the name of a specific activation function.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Add/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Add/index.html deleted file mode 100644 index 47456641e..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Add/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Add (owl-base.Owl_neural_neuron_sig.Sig.Add)

Module Sig.Add

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/AlphaDropout/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/AlphaDropout/index.html deleted file mode 100644 index e8ee8abf1..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/AlphaDropout/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -AlphaDropout (owl-base.Owl_neural_neuron_sig.Sig.AlphaDropout)

Module Sig.AlphaDropout

type neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : float -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Average/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Average/index.html deleted file mode 100644 index 66f53cda9..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Average/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Average (owl-base.Owl_neural_neuron_sig.Sig.Average)

Module Sig.Average

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/AvgPool1D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/AvgPool1D/index.html deleted file mode 100644 index 1641bb319..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/AvgPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -AvgPool1D (owl-base.Owl_neural_neuron_sig.Sig.AvgPool1D)

Module Sig.AvgPool1D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : Owl_types.padding -> int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/AvgPool2D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/AvgPool2D/index.html deleted file mode 100644 index 80c8e643f..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/AvgPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -AvgPool2D (owl-base.Owl_neural_neuron_sig.Sig.AvgPool2D)

Module Sig.AvgPool2D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : Owl_types.padding -> int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Concatenate/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Concatenate/index.html deleted file mode 100644 index a03ed82bb..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Concatenate/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Concatenate (owl-base.Owl_neural_neuron_sig.Sig.Concatenate)

Module Sig.Concatenate

type neuron_typ = {
  1. mutable axis : int;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : int -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Conv1D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Conv1D/index.html deleted file mode 100644 index 09e8024a4..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Conv1D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Conv1D (owl-base.Owl_neural_neuron_sig.Sig.Conv1D)

Module Sig.Conv1D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Conv2D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Conv2D/index.html deleted file mode 100644 index 624d78e7e..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Conv2D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Conv2D (owl-base.Owl_neural_neuron_sig.Sig.Conv2D)

Module Sig.Conv2D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.
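
A minimal sketch, assuming an instantiation of this signature is in scope as Neuron; the [|width; height; in_channels; out_channels|] kernel layout shown here is an assumption for illustration:

(* 3x3 convolution over 3 input channels producing 32 feature maps, stride 1, SAME padding *)
let conv = Neuron.Conv2D.create Owl_types.SAME [|3; 3; 3; 32|] [|1; 1|] Neuron.Init.Tanh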

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Conv3D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Conv3D/index.html deleted file mode 100644 index 5c0a118de..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Conv3D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Conv3D (owl-base.Owl_neural_neuron_sig.Sig.Conv3D)

Module Sig.Conv3D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/DilatedConv1D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/DilatedConv1D/index.html deleted file mode 100644 index c678d4f9b..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/DilatedConv1D/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -DilatedConv1D (owl-base.Owl_neural_neuron_sig.Sig.DilatedConv1D)

Module Sig.DilatedConv1D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/DilatedConv2D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/DilatedConv2D/index.html deleted file mode 100644 index c12211081..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/DilatedConv2D/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -DilatedConv2D (owl-base.Owl_neural_neuron_sig.Sig.DilatedConv2D)

Module Sig.DilatedConv2D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/DilatedConv3D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/DilatedConv3D/index.html deleted file mode 100644 index d6550c3f7..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/DilatedConv3D/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -DilatedConv3D (owl-base.Owl_neural_neuron_sig.Sig.DilatedConv3D)

Module Sig.DilatedConv3D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Dot/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Dot/index.html deleted file mode 100644 index be602cb45..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Dot/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Dot (owl-base.Owl_neural_neuron_sig.Sig.Dot)

Module Sig.Dot

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Dropout/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Dropout/index.html deleted file mode 100644 index 0cf15e567..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Dropout/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Dropout (owl-base.Owl_neural_neuron_sig.Sig.Dropout)

Module Sig.Dropout

type neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : float -> neuron_typ

Create the neuron.
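
For example, assuming an instantiation of this signature is in scope as Neuron:

(* randomly drop units with rate 0.2 during training *)
let drop = Neuron.Dropout.create 0.2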

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Embedding/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Embedding/index.html deleted file mode 100644 index 0f916d282..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Embedding/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Embedding (owl-base.Owl_neural_neuron_sig.Sig.Embedding)

Module Sig.Embedding

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_dim : int;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int -> int -> int -> Init.typ -> neuron_typ

Create the neuron.
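
A sketch, assuming an instantiation of this signature is in scope as Neuron and that the two int arguments are the input dimension (e.g. vocabulary size) and the embedding size:

(* map 10_000 discrete tokens to 64-dimensional vectors *)
let emb = Neuron.Embedding.create 10_000 64 (Neuron.Init.Uniform (0., 0.1))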

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Flatten/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Flatten/index.html deleted file mode 100644 index 6706428f2..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Flatten/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Flatten (owl-base.Owl_neural_neuron_sig.Sig.Flatten)

Module Sig.Flatten

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/FullyConnected/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/FullyConnected/index.html deleted file mode 100644 index 4539afeaf..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/FullyConnected/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -FullyConnected (owl-base.Owl_neural_neuron_sig.Sig.FullyConnected)

Module Sig.FullyConnected

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.
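
For instance, assuming an instantiation of this signature is in scope as Neuron:

(* a dense layer with 784 inputs and 256 outputs, Glorot-uniform initialisation *)
let fc = Neuron.FullyConnected.create ~inputs:784 256 Neuron.Init.GlorotUniform
let () = Neuron.FullyConnected.init fc   (* allocate w and b according to init_typ *)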

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/GRU/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/GRU/index.html deleted file mode 100644 index 86557d851..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/GRU/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GRU (owl-base.Owl_neural_neuron_sig.Sig.GRU)

Module Sig.GRU

type neuron_typ = {
  1. mutable wxz : Optimise.Algodiff.t;
  2. mutable whz : Optimise.Algodiff.t;
  3. mutable wxr : Optimise.Algodiff.t;
  4. mutable whr : Optimise.Algodiff.t;
  5. mutable wxh : Optimise.Algodiff.t;
  6. mutable whh : Optimise.Algodiff.t;
  7. mutable bz : Optimise.Algodiff.t;
  8. mutable br : Optimise.Algodiff.t;
  9. mutable bh : Optimise.Algodiff.t;
  10. mutable h : Optimise.Algodiff.t;
  11. mutable init_typ : Init.typ;
  12. mutable in_shape : int array;
  13. mutable out_shape : int array;
}

Neuron type definition.

val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/GaussianDropout/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/GaussianDropout/index.html deleted file mode 100644 index c436e41a0..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/GaussianDropout/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GaussianDropout (owl-base.Owl_neural_neuron_sig.Sig.GaussianDropout)

Module Sig.GaussianDropout

type neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : float -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/GaussianNoise/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/GaussianNoise/index.html deleted file mode 100644 index 8875c19c8..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/GaussianNoise/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GaussianNoise (owl-base.Owl_neural_neuron_sig.Sig.GaussianNoise)

Module Sig.GaussianNoise

type neuron_typ = {
  1. mutable sigma : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : float -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/GlobalAvgPool1D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/GlobalAvgPool1D/index.html deleted file mode 100644 index 74dec4923..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/GlobalAvgPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalAvgPool1D (owl-base.Owl_neural_neuron_sig.Sig.GlobalAvgPool1D)

Module Sig.GlobalAvgPool1D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/GlobalAvgPool2D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/GlobalAvgPool2D/index.html deleted file mode 100644 index 31d04d1d3..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/GlobalAvgPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalAvgPool2D (owl-base.Owl_neural_neuron_sig.Sig.GlobalAvgPool2D)

Module Sig.GlobalAvgPool2D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/GlobalMaxPool1D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/GlobalMaxPool1D/index.html deleted file mode 100644 index 573779ed7..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/GlobalMaxPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalMaxPool1D (owl-base.Owl_neural_neuron_sig.Sig.GlobalMaxPool1D)

Module Sig.GlobalMaxPool1D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/GlobalMaxPool2D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/GlobalMaxPool2D/index.html deleted file mode 100644 index 71f0fb0c2..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/GlobalMaxPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -GlobalMaxPool2D (owl-base.Owl_neural_neuron_sig.Sig.GlobalMaxPool2D)

Module Sig.GlobalMaxPool2D

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Init/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Init/index.html deleted file mode 100644 index 978071316..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Init/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Init (owl-base.Owl_neural_neuron_sig.Sig.Init)

Module Sig.Init

type typ =
  1. | Uniform of float * float
  2. | Gaussian of float * float
  3. | Standard
  4. | Tanh
  5. | GlorotNormal
  6. | GlorotUniform
  7. | LecunNormal
  8. | HeNormal
  9. | Custom of (int array -> Optimise.Algodiff.t)

Initialisation types.
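
For illustration (the Neuron prefix stands for any instantiation of the enclosing signature; the exact path to Algodiff.Arr is an assumption):

let w_init = Neuron.Init.Gaussian (0., 0.01)   (* zero-mean Gaussian weights *)
let b_init = Neuron.Init.Standard
let z_init = Neuron.Init.Custom (fun shape -> Neuron.Optimise.Algodiff.Arr.zeros shape)
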
val calc_fans : int array -> float * float

Calculate fan-in and fan-out of weights.

val run : typ -> int array -> Optimise.Algodiff.t -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Input/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Input/index.html deleted file mode 100644 index ce70159c5..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Input/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Input (owl-base.Owl_neural_neuron_sig.Sig.Input)

Module Sig.Input

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : int array -> neuron_typ

Create the neuron.
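
For example, assuming an instantiation of this signature is in scope as Neuron:

(* per-sample input shape, e.g. a 28x28 single-channel image *)
let inp = Neuron.Input.create [|28; 28; 1|]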

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/LSTM/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/LSTM/index.html deleted file mode 100644 index 15de7cb5a..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/LSTM/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -LSTM (owl-base.Owl_neural_neuron_sig.Sig.LSTM)

Module Sig.LSTM

type neuron_typ = {
  1. mutable wxi : Optimise.Algodiff.t;
  2. mutable whi : Optimise.Algodiff.t;
  3. mutable wxc : Optimise.Algodiff.t;
  4. mutable whc : Optimise.Algodiff.t;
  5. mutable wxf : Optimise.Algodiff.t;
  6. mutable whf : Optimise.Algodiff.t;
  7. mutable wxo : Optimise.Algodiff.t;
  8. mutable who : Optimise.Algodiff.t;
  9. mutable bi : Optimise.Algodiff.t;
  10. mutable bc : Optimise.Algodiff.t;
  11. mutable bf : Optimise.Algodiff.t;
  12. mutable bo : Optimise.Algodiff.t;
  13. mutable c : Optimise.Algodiff.t;
  14. mutable h : Optimise.Algodiff.t;
  15. mutable init_typ : Init.typ;
  16. mutable in_shape : int array;
  17. mutable out_shape : int array;
}

Neuron type definition.

val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.
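
A sketch, assuming an instantiation of this signature is in scope as Neuron and that the int argument is the number of hidden units:

let lstm = Neuron.LSTM.create ~time_steps:28 ~inputs:10 128 Neuron.Init.Standard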

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Lambda/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Lambda/index.html deleted file mode 100644 index 4e0037ffc..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Lambda/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Lambda (owl-base.Owl_neural_neuron_sig.Sig.Lambda)

Module Sig.Lambda

type neuron_typ = {
  1. mutable lambda : Optimise.Algodiff.t -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : ?out_shape:int array -> (Optimise.Algodiff.t -> Optimise.Algodiff.t) -> neuron_typ

Create the neuron.
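
A sketch of wrapping an arbitrary Algodiff function as a neuron, assuming an instantiation of this signature is in scope as Neuron (the path to Maths.sin is illustrative):

let f x = Neuron.Optimise.Algodiff.Maths.sin x
let lam = Neuron.Lambda.create f   (* ?out_shape is optional *)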

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/LambdaArray/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/LambdaArray/index.html deleted file mode 100644 index 083ba4500..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/LambdaArray/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -LambdaArray (owl-base.Owl_neural_neuron_sig.Sig.LambdaArray)

Module Sig.LambdaArray

type neuron_typ = {
  1. mutable lambda : Optimise.Algodiff.t array -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : int array -> (Optimise.Algodiff.t array -> Optimise.Algodiff.t) -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Linear/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Linear/index.html deleted file mode 100644 index e70bfc557..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Linear/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Linear (owl-base.Owl_neural_neuron_sig.Sig.Linear)

Module Sig.Linear

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/LinearNoBias/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/LinearNoBias/index.html deleted file mode 100644 index 72e5b7efc..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/LinearNoBias/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -LinearNoBias (owl-base.Owl_neural_neuron_sig.Sig.LinearNoBias)

Module Sig.LinearNoBias

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_shape : int array;
  4. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int -> int -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Masking/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Masking/index.html deleted file mode 100644 index 3febda193..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Masking/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Masking (owl-base.Owl_neural_neuron_sig.Sig.Masking)

Module Sig.Masking

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Max/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Max/index.html deleted file mode 100644 index 1f4521583..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Max/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Max (owl-base.Owl_neural_neuron_sig.Sig.Max)

Module Sig.Max

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/MaxPool1D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/MaxPool1D/index.html deleted file mode 100644 index f92c0c3c3..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/MaxPool1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -MaxPool1D (owl-base.Owl_neural_neuron_sig.Sig.MaxPool1D)

Module Sig.MaxPool1D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : Owl_types.padding -> int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/MaxPool2D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/MaxPool2D/index.html deleted file mode 100644 index ff6c40a19..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/MaxPool2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -MaxPool2D (owl-base.Owl_neural_neuron_sig.Sig.MaxPool2D)

Module Sig.MaxPool2D

type neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}

Neuron type definition.

val create : Owl_types.padding -> int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Mul/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Mul/index.html deleted file mode 100644 index cb2ce68d4..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Mul/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mul (owl-base.Owl_neural_neuron_sig.Sig.Mul)

Module Sig.Mul

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : unit -> neuron_typ

Create the neuron.

val connect : int array array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : 'a -> neuron_typ

Make a deep copy of the neuron and its parameters.

val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Normalisation/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Normalisation/index.html deleted file mode 100644 index 0743d3cb3..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Normalisation/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -Normalisation (owl-base.Owl_neural_neuron_sig.Sig.Normalisation)

Module Sig.Normalisation

type neuron_typ = {
  1. mutable axis : int;
  2. mutable beta : Optimise.Algodiff.t;
  3. mutable gamma : Optimise.Algodiff.t;
  4. mutable mu : Optimise.Algodiff.t;
  5. mutable var : Optimise.Algodiff.t;
  6. mutable decay : Optimise.Algodiff.t;
  7. mutable training : bool;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}

Neuron type definition.

val create : ?training:bool -> ?decay:float -> ?mu:Optimise.Algodiff.A.arr -> ?var:Optimise.Algodiff.A.arr -> int -> neuron_typ

Create the neuron. Note that axis 0 is the batch axis.
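
For example, assuming an instantiation of this signature is in scope as Neuron:

(* normalise along axis 1 with a running-average decay of 0.9, in training mode *)
let norm = Neuron.Normalisation.create ~training:true ~decay:0.9 1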

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the trainable parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update trainable parameters of the neuron, used by Optimise module.

val load_weights : neuron_typ -> Optimise.Algodiff.t array -> unit

Load both trainable and non-trainable parameters into the neuron.

val save_weights : neuron_typ -> Optimise.Algodiff.t array

Assemble both trainable and non-trainable parameters of the neuron.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/A/Linalg/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/A/Linalg/index.html deleted file mode 100644 index d8f597310..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
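
A short usage sketch, assuming a concrete module satisfying the enclosing signature is in scope as A:

let x = A.uniform [|5; 5|]
let u, s, vt = A.Linalg.svd ~thin:true x   (* thin singular value decomposition *)
let q, r = A.Linalg.qr x                   (* QR factorisation *)
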
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/A/Mat/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/A/Mat/index.html deleted file mode 100644 index c617567e5..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/A/Scalar/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/A/Scalar/index.html deleted file mode 100644 index 63738c02e..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/A/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/A/index.html deleted file mode 100644 index 29ac076e8..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Algodiff.A)

Module Algodiff.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Arr/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Arr/index.html deleted file mode 100644 index 79d261266..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/index.html deleted file mode 100644 index aa40cc896..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Algodiff.Builder)

Module Algodiff.Builder

Ops Builder
module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t

build single input single output operations

module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t

build single input pair outputs operations

module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t

build single input triple outputs operations

module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array

build single input array output operations

module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t

build pair inputs single output operations

module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t

build array input single output operations

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Aiso/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Aiso/index.html deleted file mode 100644 index 7ac0492dd..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Piso/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Piso/index.html deleted file mode 100644 index 82c6622b6..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Siao/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Siao/index.html deleted file mode 100644 index dcd2dd022..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Sipo/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Sipo/index.html deleted file mode 100644 index bcc371dcf..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Siso/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Siso/index.html deleted file mode 100644 index e72e85882..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Sito/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Sito/index.html deleted file mode 100644 index 025be0a41..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Linalg/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Linalg/index.html deleted file mode 100644 index 4d49d0e87..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val logdet : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val chol : ?upper:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val qr : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val lq : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val svd : ?thin:bool -> t -> t * t * t

Refer to :doc:`owl_dense_ndarray_generic`

val sylvester : t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val lyapunov : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val care : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dare : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Mat/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Mat/index.html deleted file mode 100644 index ac734843f..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Maths/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Maths/index.html deleted file mode 100644 index 126ae00a1..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (-) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (**) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val add : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sub : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mul : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val div : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val kron : t -> t -> t

Refer to :doc:`owl_dense_matrix_generic`

val dot : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pow : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val min2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cross_entropy : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val neg : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val abs : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val signum : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val floor : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val ceil : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val round : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqr : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqrt : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log2 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log10 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val exp : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum_reduce : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mean : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val swap : int -> int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l1norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm_sqr' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sigmoid : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val relu : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dawsn : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softplus : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softsign : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softmax : ?axis:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val reshape : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val flatten : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_item : t -> int -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_row : t -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val concat : axis:int -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val split : axis:int -> int array -> t -> t array

Refer to :doc:`owl_dense_ndarray_generic`

val of_arrays : t array array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val to_arrays : t -> t array array

Refer to :doc:`owl_dense_ndarray_generic`

val concatenate : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val stack : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_slice : int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_slice : int list list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_fancy : Owl_types.index list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_fancy : Owl_types.index list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diag : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diagm : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val trace : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val triu : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tril : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/NN/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/NN/index.html deleted file mode 100644 index 69807bf5b..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val upsampling2d : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pad : ?v:A.elt -> int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/index.html deleted file mode 100644 index 77598fbd8..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Algodiff/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Algodiff (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Algodiff)

Module Optimise.Algodiff

include Owl_algodiff_core_sig.Sig
Type definition
include Owl_algodiff_types_sig.Sig with type elt := A.elt and type arr := A.arr
type t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
Core functions
val tag : unit -> int

TODO

val primal : t -> t

TODO

val primal' : t -> t

TODO

val zero : t -> t

TODO

val reset_zero : t -> t

TODO

val tangent : t -> t

TODO

val adjref : t -> t Stdlib.ref

TODO

val adjval : t -> t

TODO

val shape : t -> int array

TODO

val is_float : t -> bool

TODO

val is_arr : t -> bool

TODO

val row_num : t -> int

number of rows

val col_num : t -> int

number of columns

val numel : t -> int

number of elements

val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t

other functions, without tracking gradient

val clip_by_l2norm : A.elt -> t -> t

other functions, without tracking gradient

val copy_primal' : t -> t

TODO

val tile : t -> int array -> t

TODO

val repeat : t -> int array -> t

TODO

val pack_elt : A.elt -> t

convert from elt type to t type.

val unpack_elt : t -> A.elt

convert from t type to elt type.

val pack_flt : float -> t

convert from float type to t type.

val _f : float -> t

A shortcut function for F A.(float_to_elt x).

val unpack_flt : t -> float

convert from t type to float type.

val pack_arr : A.arr -> t

convert from arr type to t type.

val unpack_arr : t -> A.arr

convert from t type to arr type.

val deep_info : t -> string

TODO

val type_info : t -> string

TODO

val error_binop : string -> t -> t -> 'a

TODO

val error_uniop : string -> t -> 'a

TODO

val make_forward : t -> t -> int -> t

TODO

val make_reverse : t -> int -> t

TODO

val reverse_prop : t -> t -> unit

TODO

val diff : (t -> t) -> t -> t

diff f x returns the exact derivative of a function f : scalar -> scalar at point x. Simply calling diff f will return its derivative function g of the same type, i.e. g : scalar -> scalar.

Calling this function repeatedly gives you higher-order derivatives of f, i.e. f |> diff |> diff |> diff |> ...
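
For example, a minimal sketch (assuming an Algodiff instance such as Owl_algodiff_generic.Make applied to a float ndarray module, with its values in scope; the names below are illustrative):

  (* second derivative of sin at x = 1.0; pack_flt/unpack_flt convert floats *)
  let f x = Maths.sin x in
  let f'' = f |> diff |> diff in
  unpack_flt (f'' (pack_flt 1.0))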

val diff' : (t -> t) -> t -> t * t

similar to diff, but return (f x, diff f x).

val grad : (t -> t) -> t -> t

gradient of f : (vector -> scalar) at x, reverse ad.
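
For instance, a hedged sketch computing the gradient of a simple scalar-valued function of a vector (same assumed setup as the diff example above):

  (* gradient of the squared L2 norm at x0; mathematically it equals 2 * x0 *)
  let f x = Maths.l2norm_sqr' x in
  let x0 = pack_arr (A.ones [| 1; 3 |]) in
  unpack_arr (grad f x0)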

val grad' : (t -> t) -> t -> t * t

similar to grad, but return (f x, grad f x).

val jacobian : (t -> t) -> t -> t

jacobian of f : (vector -> vector) at x, both x and y are row vectors.

val jacobian' : (t -> t) -> t -> t * t

similar to jacobian, but return (f x, jacobian f x)

val jacobianv : (t -> t) -> t -> t -> t

jacobian vector product of f : (vector -> vector) at x along v, forward ad. Namely, it calculates (jacobian f x) v

val jacobianv' : (t -> t) -> t -> t -> t * t

similar to jacobianv, but return (f x, jacobianv f x v)

val jacobianTv : (t -> t) -> t -> t -> t

transposed jacobian vector product of f : (vector -> vector) at x along v, backward ad. Namely, it calculates (transpose (jacobian f x)) v.

val jacobianTv' : (t -> t) -> t -> t -> t * t

similar to jacobianTv, but return (f x, jacobianTv f x v)

val hessian : (t -> t) -> t -> t

hessian of f : (scalar -> scalar) at x.

val hessian' : (t -> t) -> t -> t * t

similar to hessian, but return (f x, hessian f x)

val hessianv : (t -> t) -> t -> t -> t

hessian vector product of f : (scalar -> scalar) at x along v. Namely, it calculates (hessian f x) v.

val hessianv' : (t -> t) -> t -> t -> t * t

similar to hessianv, but return (f x, hessianv f x v).

val laplacian : (t -> t) -> t -> t

laplacian of f : (scalar -> scalar) at x.

val laplacian' : (t -> t) -> t -> t * t

similar to laplacian, but return (f x, laplacian f x).

val gradhessian : (t -> t) -> t -> t * t

return (grad f x, hessian f x), f : (scalar -> scalar)

val gradhessian' : (t -> t) -> t -> t * t * t

return (f x, grad f x, hessian f x)

val gradhessianv : (t -> t) -> t -> t -> t * t

return (grad f x v, hessian f x v)

val gradhessianv' : (t -> t) -> t -> t -> t * t * t

return (f x, grad f x v, hessian f x v)

include Owl_algodiff_ops_sig.Sig with type t := t and type elt := A.elt and type arr := A.arr and type op := op
module Builder : Owl_algodiff_ops_builder_sig.Sig with type t := t and type elt := A.elt and type arr := A.arr and type op := op
Supported Maths functions
module Maths : sig ... end
Supported Linalg functions
module Linalg : sig ... end
Supported Neural Network functions
module NN : sig ... end
Supported Mat functions
module Mat : sig ... end
Supported Arr functions
module Arr : sig ... end
Helper functions
include Owl_algodiff_graph_convert_sig.Sig with type t := t
val to_trace : t list -> string

to_trace [t0; t1; ...] produces the trace of the computation graph as a human-readable string, ready to be printed on the terminal.
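
A hedged usage sketch (same assumed Algodiff setup as above):

  (* build a tiny reverse-mode graph and print its trace *)
  let x = make_reverse (pack_flt 1.0) (tag ()) in
  let y = Maths.(sin x + cos x) in
  print_endline (to_trace [ y ])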

val to_dot : t list -> string

to_dot [t0; t1; ...] outputs the trace of the computation graph in the dot file format, which you can feed to other tools such as Graphviz for further visualisation.
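
Similarly, a hedged sketch that writes the dot output to a file ("graph.dot" is an illustrative name) so it can be rendered with Graphviz:

  let x = make_reverse (pack_flt 1.0) (tag ()) in
  let y = Maths.(sin x * cos x) in
  let oc = open_out "graph.dot" in
  output_string oc (to_dot [ y ]);
  close_out oc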

val pp_num : Stdlib.Format.formatter -> t -> unit

pp_num t pretty prints the abstract number used in Algodiff.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Batch/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Batch/index.html deleted file mode 100644 index dead03027..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Batch/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Batch (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Batch)

Module Optimise.Batch

Batch module

type typ =
  1. | Full
  2. | Mini of int
  3. | Sample of int
  4. | Stochastic

Types of batches.

val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t

Execute the computations defined in module typ.

val batches : typ -> Algodiff.t -> int

Return the total number of batches given a batch typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Checkpoint/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Checkpoint/index.html deleted file mode 100644 index cb1c495a1..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Checkpoint/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Checkpoint (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Checkpoint)

Module Optimise.Checkpoint

Checkpoint module

type state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}

Type definition of checkpoint

type typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of (state -> unit)
  4. | None

Batch type.

val init_state : int -> float -> state

init_state batches_per_epoch epochs initialises a state by specifying the number of batches per epoch and the number of epochs in total.
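
For example (a hedged sketch, the numbers being illustrative):

  (* 600 batches per epoch, training for 10 epochs in total *)
  let state = init_state 600 10.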

val default_checkpoint_fun : (string -> 'a) -> 'a

This function is used for saving intermediate files during optimisation.

val print_state_info : state -> unit

Print out the detail information of current state.

val print_summary : state -> unit

Print out the summary of current state.

val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Clipping/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Clipping/index.html deleted file mode 100644 index 566b77af7..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Clipping/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Clipping (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Clipping)

Module Optimise.Clipping

Clipping module

type typ =
  1. | L2norm of float
  2. | Value of float * float
  3. | None

Types of clipping functions.

val run : typ -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Gradient/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Gradient/index.html deleted file mode 100644 index 5f6cdab79..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Gradient/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -Gradient (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Gradient)

Module Optimise.Gradient

Gradient module

type typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton

Types of gradient function.

val run : typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Learning_Rate/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Learning_Rate/index.html deleted file mode 100644 index 7097c1a84..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Learning_Rate/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Learning_Rate (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Learning_Rate)

Module Optimise.Learning_Rate

Strategies for learning rate update

type typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array

Representation of learning rate update strategies. Possible values include:

  • Adam (alpha, beta1, beta2), see the referenced Adam paper for the meaning of the parameters
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array

Update the cache of gradients.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Loss/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Loss/index.html deleted file mode 100644 index a72e07fbb..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Loss/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Loss (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Loss)

Module Optimise.Loss

Loss module

type typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of (Algodiff.t -> Algodiff.t -> Algodiff.t)

Types of loss functions.

val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Momentum/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Momentum/index.html deleted file mode 100644 index 0b45fbd68..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Momentum/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Momentum (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Momentum)

Module Optimise.Momentum

Momentum module

type typ =
  1. | Standard of float
  2. | Nesterov of float
  3. | None

Types of momentum functions.

val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Params/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Params/index.html deleted file mode 100644 index 440af8d93..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Params/index.html +++ /dev/null @@ -1,14 +0,0 @@ - -Params (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Params)

Module Optimise.Params

Params module

type typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}

Type definition of parameter.

val default : unit -> typ

Create module typ with default values.

val config : ?batch:Batch.typ -> ?gradient:Gradient.typ -> ?loss:Loss.typ -> ?learning_rate:Learning_Rate.typ -> ?regularisation:Regularisation.typ -> ?momentum:Momentum.typ -> ?clipping:Clipping.typ -> ?stopping:Stopping.typ -> ?checkpoint:Checkpoint.typ -> ?verbosity:bool -> float -> typ

This function creates a parameter object with many configurations.
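
For example, a hedged configuration sketch (values are illustrative; the trailing float is assumed to be the total number of epochs, matching the epochs field above):

  let params =
    config
      ~batch:(Batch.Mini 128)
      ~learning_rate:(Learning_Rate.Adam (0.001, 0.9, 0.999))
      ~loss:Loss.Cross_entropy
      ~stopping:(Stopping.Const 1e-6)
      10.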

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Regularisation/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Regularisation/index.html deleted file mode 100644 index 5c55e0099..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Regularisation/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Regularisation (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Regularisation)

Module Optimise.Regularisation

Regularisation module

type typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None

Types of regularisation functions.

val run : typ -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Stopping/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Stopping/index.html deleted file mode 100644 index 3ad1a165d..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Stopping/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Stopping (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Stopping)

Module Optimise.Stopping

Stopping module

type typ =
  1. | Const of float
  2. | Early of int * int
  3. | None

Types of stopping functions.

val run : typ -> float -> bool

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Utils/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Utils/index.html deleted file mode 100644 index 515b31ed9..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl-base.Owl_neural_neuron_sig.Sig.Optimise.Utils)

Module Optimise.Utils

Utils module

val sample_num : Algodiff.t -> int

Return the total number of samples in the passed-in ndarray.

val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t

draw_samples x y draws samples from both x (observations) and y (labels). The samples are drawn along axis 0, so x and y must agree along axis 0.
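
A hedged usage sketch (x and y are assumed to be Algodiff-packed ndarrays that agree along axis 0):

  (* randomly draw a mini-batch of 64 observation/label pairs *)
  let xb, yb = draw_samples x y 64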

val get_chunk : Algodiff.t -> Algodiff.t -> int -> int -> Algodiff.t * Algodiff.t

get_chunk x y i c gets a continuous chunk of c samples from position i from x (observations) and y (labels).
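
Similarly, a hedged sketch taking a contiguous chunk (same assumptions as above):

  (* take 64 consecutive samples starting at position 128 *)
  let xc, yc = get_chunk x y 128 64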

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/index.html deleted file mode 100644 index 210927fc4..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Optimise/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -Optimise (owl-base.Owl_neural_neuron_sig.Sig.Optimise)

Module Sig.Optimise

module Utils : sig ... end

Utils module

module Learning_Rate : sig ... end

Strategies for learning rate update

module Batch : sig ... end

Batch module

module Loss : sig ... end

Loss module

module Gradient : sig ... end

Gradient module

module Momentum : sig ... end

Momentum module

module Regularisation : sig ... end

Regularisation module

module Clipping : sig ... end

Clipping module

module Stopping : sig ... end

Stopping module

module Checkpoint : sig ... end

Checkpoint module

module Params : sig ... end

Params module

Core functions
val minimise_weight : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Checkpoint.state * Algodiff.t

This function minimises the weight w of the passed-in function f.

  • f is a function f : w -> x -> y.
  • w is a row vector but y can have any shape.
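
A hedged sketch of fitting a simple linear model (the data x, y and the initial weight w0 are assumed to be Algodiff values; the loss and other settings come from Params):

  let f w x = Algodiff.Maths.(x *@ w) in
  let _state, w' = minimise_weight (Params.default ()) f w0 x y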

val minimise_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> (Algodiff.t array array -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state

This function is specifically designed for minimising the weights in a neural network of graph structure. In Owl's earlier versions, the functions in the regression module were actually implemented using this function.

val minimise_fun : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Checkpoint.state * Algodiff.t

This function minimises f : x -> y w.r.t x.

x is an ndarray, and y is a scalar value.
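
For example, a hedged sketch minimising a simple quadratic (the starting point x0 is illustrative):

  let f x = Algodiff.Maths.(sum' (x * x)) in
  let x0 = Algodiff.(pack_arr (A.ones [| 1; 3 |])) in
  let _state, x_min = minimise_fun (Params.default ()) f x0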

val minimise_compiled_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> (unit -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state

TODO

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Padding1D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Padding1D/index.html deleted file mode 100644 index 10e9dff1b..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Padding1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding1D (owl-base.Owl_neural_neuron_sig.Sig.Padding1D)

Module Sig.Padding1D

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Padding2D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Padding2D/index.html deleted file mode 100644 index ab89c489a..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Padding2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding2D (owl-base.Owl_neural_neuron_sig.Sig.Padding2D)

Module Sig.Padding2D

type neuron_typ = {
  1. mutable padding : int array array;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : int array array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Padding3D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Padding3D/index.html deleted file mode 100644 index 6810a331d..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Padding3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding3D (owl-base.Owl_neural_neuron_sig.Sig.Padding3D)

Module Sig.Padding3D

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Recurrent/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Recurrent/index.html deleted file mode 100644 index 7136fb4d7..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Recurrent/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -Recurrent (owl-base.Owl_neural_neuron_sig.Sig.Recurrent)

Module Sig.Recurrent

type neuron_typ = {
  1. mutable whh : Optimise.Algodiff.t;
  2. mutable wxh : Optimise.Algodiff.t;
  3. mutable why : Optimise.Algodiff.t;
  4. mutable bh : Optimise.Algodiff.t;
  5. mutable by : Optimise.Algodiff.t;
  6. mutable h : Optimise.Algodiff.t;
  7. mutable hiddens : int;
  8. mutable act : Activation.typ;
  9. mutable init_typ : Init.typ;
  10. mutable in_shape : int array;
  11. mutable out_shape : int array;
}

Neuron type definition.

val create : ?time_steps:int -> ?inputs:int -> int -> int -> Activation.typ -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Reshape/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Reshape/index.html deleted file mode 100644 index 0bcd4fbf1..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Reshape/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Reshape (owl-base.Owl_neural_neuron_sig.Sig.Reshape)

Module Sig.Reshape

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Slice/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/Slice/index.html deleted file mode 100644 index bc75eccbb..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/Slice/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Slice (owl-base.Owl_neural_neuron_sig.Sig.Slice)

Module Sig.Slice

type neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
  3. mutable slice : int list list;
}

Neuron type definition.

val create : int list list -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/TransposeConv1D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/TransposeConv1D/index.html deleted file mode 100644 index 60b96737b..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/TransposeConv1D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -TransposeConv1D (owl-base.Owl_neural_neuron_sig.Sig.TransposeConv1D)

Module Sig.TransposeConv1D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/TransposeConv2D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/TransposeConv2D/index.html deleted file mode 100644 index a30d4c510..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/TransposeConv2D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -TransposeConv2D (owl-base.Owl_neural_neuron_sig.Sig.TransposeConv2D)

Module Sig.TransposeConv2D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/TransposeConv3D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/TransposeConv3D/index.html deleted file mode 100644 index e2a389d1f..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/TransposeConv3D/index.html +++ /dev/null @@ -1,8 +0,0 @@ - -TransposeConv3D (owl-base.Owl_neural_neuron_sig.Sig.TransposeConv3D)

Module Sig.TransposeConv3D

type neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}

Neuron type definition.

val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val init : neuron_typ -> unit

Initialise the neuron and its parameters.

val reset : neuron_typ -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron_typ -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron_typ -> Optimise.Algodiff.t array

Assemble all the parameters in an array, used by Optimise module.

val mkpri : neuron_typ -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron_typ -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron_typ -> Optimise.Algodiff.t array -> unit

Update parameters in a neuron, used by Optimise module.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/UpSampling1D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/UpSampling1D/index.html deleted file mode 100644 index b553e4940..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/UpSampling1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling1D (owl-base.Owl_neural_neuron_sig.Sig.UpSampling1D)

Module Sig.UpSampling1D

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/UpSampling2D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/UpSampling2D/index.html deleted file mode 100644 index 769a36254..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/UpSampling2D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling2D (owl-base.Owl_neural_neuron_sig.Sig.UpSampling2D)

Module Sig.UpSampling2D

type neuron_typ = {
  1. mutable size : int array;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}

Neuron type definition.

val create : int array -> neuron_typ

Create the neuron.

val connect : int array -> neuron_typ -> unit

Connect this neuron to others in a neural network.

val copy : neuron_typ -> neuron_typ

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron_typ -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : unit -> string

Return the name of the neuron.

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/UpSampling3D/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/UpSampling3D/index.html deleted file mode 100644 index c43162d9f..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/UpSampling3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling3D (owl-base.Owl_neural_neuron_sig.Sig.UpSampling3D)

Module Sig.UpSampling3D

\ No newline at end of file diff --git a/owl-base/Owl_neural_neuron_sig/module-type-Sig/index.html b/owl-base/Owl_neural_neuron_sig/module-type-Sig/index.html deleted file mode 100644 index 8413aa8a1..000000000 --- a/owl-base/Owl_neural_neuron_sig/module-type-Sig/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Sig (owl-base.Owl_neural_neuron_sig.Sig)

Module type Owl_neural_neuron_sig.Sig

Init neuron
module Init : sig ... end
Input neuron
module Input : sig ... end
Activation neuron
module Activation : sig ... end
Linear neuron
module Linear : sig ... end
LinearNoBias neuron
module LinearNoBias : sig ... end
Recurrent neuron
module Recurrent : sig ... end
LSTM neuron
module LSTM : sig ... end
GRU neuron
module GRU : sig ... end
Conv1D neuron
module Conv1D : sig ... end
Conv2D neuron
module Conv2D : sig ... end
Conv3D neuron
module Conv3D : sig ... end
DilatedConv1D neuron
module DilatedConv1D : sig ... end
DilatedConv2D neuron
module DilatedConv2D : sig ... end
DilatedConv3D neuron
module DilatedConv3D : sig ... end
TransposeConv1D neuron
module TransposeConv1D : sig ... end
TransposeConv2D neuron
module TransposeConv2D : sig ... end
TransposeConv3D neuron
module TransposeConv3D : sig ... end
FullyConnected neuron
module FullyConnected : sig ... end
MaxPool1D neuron
module MaxPool1D : sig ... end
MaxPool2D neuron
module MaxPool2D : sig ... end
AvgPool1D neuron
module AvgPool1D : sig ... end
AvgPool2D neuron
module AvgPool2D : sig ... end
GlobalMaxPool1D neuron
module GlobalMaxPool1D : sig ... end
GlobalMaxPool2D neuron
module GlobalMaxPool2D : sig ... end
GlobalAvgPool1D neuron
module GlobalAvgPool1D : sig ... end
GlobalAvgPool2D neuron
module GlobalAvgPool2D : sig ... end
UpSampling1D neuron
module UpSampling1D : sig ... end
UpSampling2D neuron
module UpSampling2D : sig ... end
UpSampling3D neuron
module UpSampling3D : sig ... end
Padding1D neuron
module Padding1D : sig ... end
Padding2D neuron
module Padding2D : sig ... end
Padding3D neuron
module Padding3D : sig ... end
Lambda neuron
module Lambda : sig ... end
LambdaArray neuron
module LambdaArray : sig ... end
Dropout neuron
module Dropout : sig ... end
Reshape neuron
module Reshape : sig ... end
Flatten neuron
module Flatten : sig ... end
Slice neuron
module Slice : sig ... end
Add neuron
module Add : sig ... end
Mul neuron
module Mul : sig ... end
Dot neuron
module Dot : sig ... end
Max neuron
module Max : sig ... end
Average neuron
module Average : sig ... end
Concatenate neuron
module Concatenate : sig ... end
Normalisation neuron
module Normalisation : sig ... end
GaussianNoise neuron
module GaussianNoise : sig ... end
GaussianDropout neuron
module GaussianDropout : sig ... end
AlphaDropout neuron
module AlphaDropout : sig ... end
Embedding neuron
module Embedding : sig ... end
Masking neuron
module Masking : sig ... end
Core functions
type neuron =
  1. | Input of Input.neuron_typ
  2. | Linear of Linear.neuron_typ
  3. | LinearNoBias of LinearNoBias.neuron_typ
  4. | Embedding of Embedding.neuron_typ
  5. | LSTM of LSTM.neuron_typ
  6. | GRU of GRU.neuron_typ
  7. | Recurrent of Recurrent.neuron_typ
  8. | Conv1D of Conv1D.neuron_typ
  9. | Conv2D of Conv2D.neuron_typ
  10. | Conv3D of Conv3D.neuron_typ
  11. | DilatedConv1D of DilatedConv1D.neuron_typ
  12. | DilatedConv2D of DilatedConv2D.neuron_typ
  13. | DilatedConv3D of DilatedConv3D.neuron_typ
  14. | TransposeConv1D of TransposeConv1D.neuron_typ
  15. | TransposeConv2D of TransposeConv2D.neuron_typ
  16. | TransposeConv3D of TransposeConv3D.neuron_typ
  17. | FullyConnected of FullyConnected.neuron_typ
  18. | MaxPool1D of MaxPool1D.neuron_typ
  19. | MaxPool2D of MaxPool2D.neuron_typ
  20. | AvgPool1D of AvgPool1D.neuron_typ
  21. | AvgPool2D of AvgPool2D.neuron_typ
  22. | GlobalMaxPool1D of GlobalMaxPool1D.neuron_typ
  23. | GlobalMaxPool2D of GlobalMaxPool2D.neuron_typ
  24. | GlobalAvgPool1D of GlobalAvgPool1D.neuron_typ
  25. | GlobalAvgPool2D of GlobalAvgPool2D.neuron_typ
  26. | UpSampling2D of UpSampling2D.neuron_typ
  27. | Padding2D of Padding2D.neuron_typ
  28. | Dropout of Dropout.neuron_typ
  29. | Reshape of Reshape.neuron_typ
  30. | Flatten of Flatten.neuron_typ
  31. | Slice of Slice.neuron_typ
  32. | Lambda of Lambda.neuron_typ
  33. | LambdaArray of LambdaArray.neuron_typ
  34. | Activation of Activation.neuron_typ
  35. | GaussianNoise of GaussianNoise.neuron_typ
  36. | GaussianDropout of GaussianDropout.neuron_typ
  37. | AlphaDropout of AlphaDropout.neuron_typ
  38. | Normalisation of Normalisation.neuron_typ
  39. | Add of Add.neuron_typ
  40. | Mul of Mul.neuron_typ
  41. | Dot of Dot.neuron_typ
  42. | Max of Max.neuron_typ
  43. | Average of Average.neuron_typ
  44. | Concatenate of Concatenate.neuron_typ
    (*

    Types of neuron.

    *)
val get_in_out_shape : neuron -> int array * int array

Get both input and output shapes of a neuron.

val get_in_shape : neuron -> int array

Get the input shape of a neuron.

val get_out_shape : neuron -> int array

Get the output shape of a neuron.

val connect : int array array -> neuron -> unit

Connect this neuron to others in a neural network.

val init : neuron -> unit

Initialise the neuron and its parameters.

val reset : neuron -> unit

Reset the parameters in a neuron.

val mktag : int -> neuron -> unit

Tag the neuron, used by Algodiff module.

val mkpar : neuron -> Optimise.Algodiff.t array

Assemble all the trainable parameters in an array, used by Optimise module.

val mkpri : neuron -> Optimise.Algodiff.t array

Assemble all the primal values in an array, used by Optimise module.

val mkadj : neuron -> Optimise.Algodiff.t array

Assemble all the adjoint values in an array, used by Optimise module.

val update : neuron -> Optimise.Algodiff.t array -> unit

Update trainable parameters in a neuron, used by Optimise module.

val load_weights : neuron -> Optimise.Algodiff.t array -> unit

Load both trainable and non-trainable parameters into the neuron.

val save_weights : neuron -> Optimise.Algodiff.t array

Assemble both trainable and non-trainable parameters of the neuron.

val copy : neuron -> neuron

Make a deep copy of the neuron and its parameters.

Execute the computation in this neuron.

val to_string : neuron -> string

Convert the neuron to its string representation. The string is often a summary of the parameters defined in the neuron.

val to_name : neuron -> string

Return the name of the neuron.
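
The functions above follow a fixed lifecycle: connect and init prepare the neuron, mkpar exposes its trainable parameters to the optimiser, and update writes the new values back. The sketch below is illustrative only; it is written as a small functor over exactly the operations it uses, so it can be checked against any instantiation of this module, and adjust stands for a caller-supplied update rule such as one step of gradient descent.

module Lifecycle (N : sig
  type neuron
  type t                                   (* stands for Optimise.Algodiff.t *)
  val init   : neuron -> unit
  val mkpar  : neuron -> t array
  val update : neuron -> t array -> unit
end) = struct
  (* Initialise the neuron, then map the update rule over its trainable
     parameters and write the results back. *)
  let train_step ~adjust n =
    N.init n;
    N.update n (Array.map adjust (N.mkpar n))
end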

\ No newline at end of file diff --git a/owl-base/Owl_numdiff_generic/.dummy b/owl-base/Owl_numdiff_generic/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_numdiff_generic/Make/argument-1-A/index.html b/owl-base/Owl_numdiff_generic/Make/argument-1-A/index.html deleted file mode 100644 index 10e353954..000000000 --- a/owl-base/Owl_numdiff_generic/Make/argument-1-A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_numdiff_generic.Make.A)

Parameter Make.A

include Owl_types_ndarray_numdiff.Sig with type elt = float
include Owl_types_ndarray_basic.Sig with type elt = float
type arr
type elt = float
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val mapi : (int -> elt -> elt) -> arr -> arr
val (+) : arr -> arr -> arr
val (-) : arr -> arr -> arr
val (*) : arr -> arr -> arr
val (/) : arr -> arr -> arr
val (+$) : arr -> elt -> arr
val (-$) : arr -> elt -> arr
val (*$) : arr -> elt -> arr
val (/$) : arr -> elt -> arr
\ No newline at end of file diff --git a/owl-base/Owl_numdiff_generic/Make/index.html b/owl-base/Owl_numdiff_generic/Make/index.html deleted file mode 100644 index 99d157ea7..000000000 --- a/owl-base/Owl_numdiff_generic/Make/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Make (owl-base.Owl_numdiff_generic.Make)

Module Owl_numdiff_generic.Make

Parameters

module A : Owl_types.Ndarray_Numdiff with type elt = float

Signature

type arr = A.arr
type elt = A.elt
val _eps : float
val _ep1 : float
val _ep2 : float
val diff : (float -> float) -> float -> float
val diff' : (float -> float) -> float -> float * float
val diff2 : (float -> float) -> float -> float
val diff2' : (float -> float) -> float -> float * float
val grad' : (A.arr -> A.elt) -> A.arr -> A.arr * A.arr
val grad : (A.arr -> A.elt) -> A.arr -> A.arr
val jacobianT' : (A.arr -> A.arr) -> A.arr -> A.arr * A.arr
val jacobianT : (A.arr -> A.arr) -> A.arr -> A.arr
val jacobian' : (A.arr -> A.arr) -> A.arr -> A.arr * A.arr
val jacobian : (A.arr -> A.arr) -> A.arr -> A.arr
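
A minimal usage sketch of this functor. It assumes, beyond what is stated above, that Owl_base_dense_ndarray.D satisfies Owl_types.Ndarray_Numdiff with elt = float; any float64 ndarray module with that signature can be substituted.

module N = Owl_numdiff_generic.Make (Owl_base_dense_ndarray.D)

let () =
  (* first derivative of sin at x = 1, estimated numerically *)
  Printf.printf "diff sin 1.0 ~ %f\n" (N.diff sin 1.0);
  (* gradient of the sum-of-squares function, a vector -> scalar map *)
  let f x = Owl_base_dense_ndarray.D.(sum' (mul x x)) in
  let x = Owl_base_dense_ndarray.D.ones [| 3 |] in
  Owl_base_dense_ndarray.D.print (N.grad f x)
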
\ No newline at end of file diff --git a/owl-base/Owl_numdiff_generic_sig/.dummy b/owl-base/Owl_numdiff_generic_sig/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_numdiff_generic_sig/Impl/argument-1-A/index.html b/owl-base/Owl_numdiff_generic_sig/Impl/argument-1-A/index.html deleted file mode 100644 index 7512664e9..000000000 --- a/owl-base/Owl_numdiff_generic_sig/Impl/argument-1-A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_numdiff_generic_sig.Impl.A)

Parameter Impl.A

include Owl_types_ndarray_numdiff.Sig with type elt = float
include Owl_types_ndarray_basic.Sig with type elt = float
type arr
type elt = float
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val mapi : (int -> elt -> elt) -> arr -> arr
val (+) : arr -> arr -> arr
val (-) : arr -> arr -> arr
val (*) : arr -> arr -> arr
val (/) : arr -> arr -> arr
val (+$) : arr -> elt -> arr
val (-$) : arr -> elt -> arr
val (*$) : arr -> elt -> arr
val (/$) : arr -> elt -> arr
\ No newline at end of file diff --git a/owl-base/Owl_numdiff_generic_sig/Impl/index.html b/owl-base/Owl_numdiff_generic_sig/Impl/index.html deleted file mode 100644 index c58b07804..000000000 --- a/owl-base/Owl_numdiff_generic_sig/Impl/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Impl (owl-base.Owl_numdiff_generic_sig.Impl)

Module Owl_numdiff_generic_sig.Impl

Parameters

module A : Owl_types.Ndarray_Numdiff with type elt = float

Signature

Type definition
type arr

General ndarray type

type elt

Scalar type

Basic functions
val diff : (elt -> elt) -> elt -> elt

derivative of f : scalar -> scalar.

val diff' : (elt -> elt) -> elt -> elt * elt

derivative of f : scalar -> scalar, return both f x and f' x.

val diff2 : (elt -> elt) -> elt -> elt

second order derivative of f : scalar -> scalar.

val diff2' : (elt -> elt) -> elt -> elt * elt

second order derivative of f : scalar -> scalar, return both f x and f'' x.

val grad : (arr -> elt) -> arr -> arr

gradient of f : vector -> scalar.

val grad' : (arr -> elt) -> arr -> arr * arr

gradient of f : vector -> scalar, return f x and g x.

val jacobian : (arr -> arr) -> arr -> arr

jacobian of f : vector -> vector.

val jacobian' : (arr -> arr) -> arr -> arr * arr

jacobian of f : vector -> vector, return f x and j x.

val jacobianT : (arr -> arr) -> arr -> arr

transposed jacobian of f : vector -> vector.

val jacobianT' : (arr -> arr) -> arr -> arr * arr

transposed jacobian of f : vector -> vector, return f x and j x.

\ No newline at end of file diff --git a/owl-base/Owl_numdiff_generic_sig/module-type-Sig/index.html b/owl-base/Owl_numdiff_generic_sig/module-type-Sig/index.html deleted file mode 100644 index 5cb87c4cc..000000000 --- a/owl-base/Owl_numdiff_generic_sig/module-type-Sig/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Sig (owl-base.Owl_numdiff_generic_sig.Sig)

Module type Owl_numdiff_generic_sig.Sig

Type definition
type arr

General ndarray type

type elt

Scalar type

Basic functions
val diff : (elt -> elt) -> elt -> elt

derivative of f : scalar -> scalar.

val diff' : (elt -> elt) -> elt -> elt * elt

derivative of f : scalar -> scalar, return both f x and f' x.

val diff2 : (elt -> elt) -> elt -> elt

second order derivative of f : scalar -> scalar.

val diff2' : (elt -> elt) -> elt -> elt * elt

second order derivative of f : scalar -> scalar, return both f x and f'' x.

val grad : (arr -> elt) -> arr -> arr

gradient of f : vector -> scalar.

val grad' : (arr -> elt) -> arr -> arr * arr

gradient of f : vector -> scalar, return f x and g x.

val jacobian : (arr -> arr) -> arr -> arr

jacobian of f : vector -> vector.

val jacobian' : (arr -> arr) -> arr -> arr * arr

jacobian of f : vector -> vector, return f x and j x.

val jacobianT : (arr -> arr) -> arr -> arr

transposed jacobian of f : vector -> vector.

val jacobianT' : (arr -> arr) -> arr -> arr * arr

transposed jacobian of f : vector -> vector, return f x and j x.

\ No newline at end of file diff --git a/owl-base/Owl_operator/.dummy b/owl-base/Owl_operator/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_operator/Make_Basic/argument-1-M/index.html b/owl-base/Owl_operator/Make_Basic/argument-1-M/index.html deleted file mode 100644 index 813fda53e..000000000 --- a/owl-base/Owl_operator/Make_Basic/argument-1-M/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -M (owl-base.Owl_operator.Make_Basic.M)

Parameter Make_Basic.M

type ('a, 'b) t
val add : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val sub : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val mul : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val div : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val add_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val sub_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val mul_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val div_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val scalar_add : 'a -> ('a, 'b) t -> ('a, 'b) t
val scalar_sub : 'a -> ('a, 'b) t -> ('a, 'b) t
val scalar_mul : 'a -> ('a, 'b) t -> ('a, 'b) t
val scalar_div : 'a -> ('a, 'b) t -> ('a, 'b) t
val equal : ('a, 'b) t -> ('a, 'b) t -> bool
val not_equal : ('a, 'b) t -> ('a, 'b) t -> bool
val greater : ('a, 'b) t -> ('a, 'b) t -> bool
val less : ('a, 'b) t -> ('a, 'b) t -> bool
val greater_equal : ('a, 'b) t -> ('a, 'b) t -> bool
val less_equal : ('a, 'b) t -> ('a, 'b) t -> bool
\ No newline at end of file diff --git a/owl-base/Owl_operator/Make_Basic/index.html b/owl-base/Owl_operator/Make_Basic/index.html deleted file mode 100644 index c602b4e38..000000000 --- a/owl-base/Owl_operator/Make_Basic/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Make_Basic (owl-base.Owl_operator.Make_Basic)

Module Owl_operator.Make_Basic

Parameters

Signature

val (+) : ('a, 'b) M.t -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of add

val (-) : ('a, 'b) M.t -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of sub

val (*) : ('a, 'b) M.t -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of mul

val (/) : ('a, 'b) M.t -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of div

val (+$) : ('a, 'b) M.t -> 'a -> ('a, 'b) M.t

Operator of add_scalar

val (-$) : ('a, 'b) M.t -> 'a -> ('a, 'b) M.t

Operator of sub_scalar

val (*$) : ('a, 'b) M.t -> 'a -> ('a, 'b) M.t

Operator of mul_scalar

val (/$) : ('a, 'b) M.t -> 'a -> ('a, 'b) M.t

Operator of div_scalar

val ($+) : 'a -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of scalar_add

val ($-) : 'a -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of scalar_sub

val ($*) : 'a -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of scalar_mul

val ($/) : 'a -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of scalar_div

val (=) : ('a, 'b) M.t -> ('a, 'b) M.t -> bool

Operator of equal

val (!=) : ('a, 'b) M.t -> ('a, 'b) M.t -> bool

Operator of not_equal

val (<>) : ('a, 'b) M.t -> ('a, 'b) M.t -> bool

Operator of not_equal

val (>) : ('a, 'b) M.t -> ('a, 'b) M.t -> bool

Operator of greater

val (<) : ('a, 'b) M.t -> ('a, 'b) M.t -> bool

Operator of less

val (>=) : ('a, 'b) M.t -> ('a, 'b) M.t -> bool

Operator of greater_equal

val (<=) : ('a, 'b) M.t -> ('a, 'b) M.t -> bool

Operator of less_equal
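
Make_Basic only defines infix aliases for the functions its parameter M provides, so using it amounts to applying the functor and opening the result. The sketch below assumes (this is an assumption, not stated above) that Owl_base_dense_ndarray_generic supplies the add/sub/mul/div, *_scalar and comparison functions the parameter signature lists; note that opening the result shadows the polymorphic = with the ndarray equal.

module Op = Owl_operator.Make_Basic (Owl_base_dense_ndarray_generic)

let () =
  let open Op in
  let x = Owl_base_dense_ndarray_generic.ones Bigarray.Float64 [| 2; 2 |] in
  let y = (x + x) *$ 2. in                  (* add, then mul_scalar *)
  Printf.printf "y = x *$ 4. : %b\n" (y = x *$ 4.)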

\ No newline at end of file diff --git a/owl-base/Owl_operator/Make_Extend/argument-1-M/index.html b/owl-base/Owl_operator/Make_Extend/argument-1-M/index.html deleted file mode 100644 index e335f3a43..000000000 --- a/owl-base/Owl_operator/Make_Extend/argument-1-M/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -M (owl-base.Owl_operator.Make_Extend.M)

Parameter Make_Extend.M

type ('a, 'b) t
val equal_scalar : ('a, 'b) t -> 'a -> bool
val not_equal_scalar : ('a, 'b) t -> 'a -> bool
val less_scalar : ('a, 'b) t -> 'a -> bool
val greater_scalar : ('a, 'b) t -> 'a -> bool
val less_equal_scalar : ('a, 'b) t -> 'a -> bool
val greater_equal_scalar : ('a, 'b) t -> 'a -> bool
val elt_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val elt_not_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val elt_less : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val elt_greater : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val elt_less_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val elt_greater_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val elt_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val elt_not_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val elt_less_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val elt_greater_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val elt_less_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val elt_greater_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val fmod : (float, 'a) t -> (float, 'a) t -> (float, 'a) t
val fmod_scalar : (float, 'a) t -> float -> (float, 'a) t
val pow : (float, 'a) t -> (float, 'a) t -> (float, 'a) t
val scalar_pow : float -> (float, 'a) t -> (float, 'a) t
val pow_scalar : (float, 'a) t -> float -> (float, 'a) t
val approx_equal : ?eps:float -> ('a, 'b) t -> ('a, 'b) t -> bool
val approx_equal_scalar : ?eps:float -> ('a, 'b) t -> 'a -> bool
val approx_elt_equal : ?eps:float -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val approx_elt_equal_scalar : ?eps:float -> ('a, 'b) t -> 'a -> ('a, 'b) t
val add_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit
val sub_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit
val mul_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit
val div_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit
val add_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit
val sub_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit
val mul_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit
val div_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit
val concat_vertical : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val concat_horizontal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val get_fancy_ext : Owl_types.index array -> ('a, 'b) t -> ('a, 'b) t
val set_fancy_ext : Owl_types.index array -> ('a, 'b) t -> ('a, 'b) t -> unit
val get_slice_ext : int list array -> ('a, 'b) t -> ('a, 'b) t
val set_slice_ext : int list array -> ('a, 'b) t -> ('a, 'b) t -> unit
\ No newline at end of file diff --git a/owl-base/Owl_operator/Make_Extend/index.html b/owl-base/Owl_operator/Make_Extend/index.html deleted file mode 100644 index 8d27e21b0..000000000 --- a/owl-base/Owl_operator/Make_Extend/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Make_Extend (owl-base.Owl_operator.Make_Extend)

Module Owl_operator.Make_Extend

Parameters

Signature

val (=$) : ('a, 'b) M.t -> 'a -> bool

Operator of equal_scalar

val (!=$) : ('a, 'b) M.t -> 'a -> bool

Operator of not_equal_scalar

val (<>$) : ('a, 'b) M.t -> 'a -> bool

Operator of not_equal_scalar

val (<$) : ('a, 'b) M.t -> 'a -> bool

Operator of less_scalar

val (>$) : ('a, 'b) M.t -> 'a -> bool

Operator of greater_scalar

val (<=$) : ('a, 'b) M.t -> 'a -> bool

Operator of less_equal_scalar

val (>=$) : ('a, 'b) M.t -> 'a -> bool

Operator of greater_equal_scalar

val (=.) : ('a, 'b) M.t -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of elt_equal

val (!=.) : ('a, 'b) M.t -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of elt_not_equal

val (<>.) : ('a, 'b) M.t -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of elt_not_equal

val (<.) : ('a, 'b) M.t -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of elt_less

val (>.) : ('a, 'b) M.t -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of elt_greater

val (<=.) : ('a, 'b) M.t -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of elt_less_equal

val (>=.) : ('a, 'b) M.t -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of elt_greater_equal

val (=.$) : ('a, 'b) M.t -> 'a -> ('a, 'b) M.t

Operator of elt_equal_scalar

val (!=.$) : ('a, 'b) M.t -> 'a -> ('a, 'b) M.t

Operator of elt_not_equal_scalar

val (<>.$) : ('a, 'b) M.t -> 'a -> ('a, 'b) M.t

Operator of elt_not_equal_scalar

val (<.$) : ('a, 'b) M.t -> 'a -> ('a, 'b) M.t

Operator of elt_less_scalar

val (>.$) : ('a, 'b) M.t -> 'a -> ('a, 'b) M.t

Operator of elt_greater_scalar

val (<=.$) : ('a, 'b) M.t -> 'a -> ('a, 'b) M.t

Operator of elt_less_equal_scalar

val (>=.$) : ('a, 'b) M.t -> 'a -> ('a, 'b) M.t

Operator of elt_greater_equal_scalar

val (=~) : ?eps:float -> ('a, 'b) M.t -> ('a, 'b) M.t -> bool

Operator of approx_equal

val (=~$) : ?eps:float -> ('a, 'b) M.t -> 'a -> bool

Operator of approx_equal_scalar

val (=~.) : ?eps:float -> ('a, 'b) M.t -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of approx_elt_equal

val (=~.$) : ?eps:float -> ('a, 'b) M.t -> 'a -> ('a, 'b) M.t

Operator of approx_elt_equal_scalar

val (%) : (float, 'a) M.t -> (float, 'a) M.t -> (float, 'a) M.t

Operator of fmod

val (%$) : (float, 'a) M.t -> float -> (float, 'a) M.t

Operator of fmod_scalar

val (**) : (float, 'a) M.t -> (float, 'a) M.t -> (float, 'a) M.t

Operator of pow

val ($**) : float -> (float, 'a) M.t -> (float, 'a) M.t

Operator of scalar_pow

val (**$) : (float, 'a) M.t -> float -> (float, 'a) M.t

Operator of pow_scalar

val (+=) : ('a, 'b) M.t -> ('a, 'b) M.t -> unit

Operator of add_

val (-=) : ('a, 'b) M.t -> ('a, 'b) M.t -> unit

Operator of sub_

val (*=) : ('a, 'b) M.t -> ('a, 'b) M.t -> unit

Operator of mul_

val (/=) : ('a, 'b) M.t -> ('a, 'b) M.t -> unit

Operator of div_

val (+$=) : ('a, 'b) M.t -> 'a -> unit

Operator of add_scalar_

val (-$=) : ('a, 'b) M.t -> 'a -> unit

Operator of sub_scalar_

val (*$=) : ('a, 'b) M.t -> 'a -> unit

Operator of mul_scalar_

val (/$=) : ('a, 'b) M.t -> 'a -> unit

Operator of div_scalar_

val (@=) : ('a, 'b) M.t -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of concat_vertical

val (@||) : ('a, 'b) M.t -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of concat_horizontal

val (.!{;..}) : ('a, 'b) M.t -> Owl_types.index array -> ('a, 'b) M.t

Operator of get_fancy

val (.!{;..}<-) : ('a, 'b) M.t -> Owl_types.index array -> ('a, 'b) M.t -> unit

Operator of set_fancy

val (.${}) : ('a, 'b) M.t -> int list -> ('a, 'b) M.t
val (.${;..}) : ('a, 'b) M.t -> int list array -> ('a, 'b) M.t

Operator of get_slice

val (.${}<-) : ('a, 'b) M.t -> int list -> ('a, 'b) M.t -> unit
val (.${;..}<-) : ('a, 'b) M.t -> int list array -> ('a, 'b) M.t -> unit

Operator of set_slice

\ No newline at end of file diff --git a/owl-base/Owl_operator/Make_Linalg/argument-1-M/index.html b/owl-base/Owl_operator/Make_Linalg/argument-1-M/index.html deleted file mode 100644 index 0c207cd71..000000000 --- a/owl-base/Owl_operator/Make_Linalg/argument-1-M/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -M (owl-base.Owl_operator.Make_Linalg.M)

Parameter Make_Linalg.M

type ('a, 'b) t
val mpow : ('a, 'b) t -> float -> ('a, 'b) t
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
\ No newline at end of file diff --git a/owl-base/Owl_operator/Make_Linalg/index.html b/owl-base/Owl_operator/Make_Linalg/index.html deleted file mode 100644 index 3fc1361cc..000000000 --- a/owl-base/Owl_operator/Make_Linalg/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Make_Linalg (owl-base.Owl_operator.Make_Linalg)

Module Owl_operator.Make_Linalg

Parameters

Signature

val (**@) : ('a, 'b) M.t -> float -> ('a, 'b) M.t

Operator of mpow, i.e. matrix power.

val (/@) : ('a, 'b) M.t -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of linsolve a b, i.e. for solving a linear system a * x = b.
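
Make_Linalg needs only the two functions shown for its parameter M, so a toy module is enough to show how mpow and linsolve surface as the **@ and /@ operators. The module below is purely illustrative (a 1x1 'matrix' represented as a bare float); real code would apply the functor to an Owl matrix module instead.

module Toy = struct
  type ('a, 'b) t = float                   (* the type parameters are phantom *)
  let mpow x n = x ** n
  let linsolve ?(trans = false) ?(typ : [ `n | `u | `l ] = `n) a b =
    ignore trans; ignore typ;
    b /. a                                  (* solve a * x = b for a 1x1 a *)
end

module LOp = Owl_operator.Make_Linalg (Toy)

let () =
  let open LOp in
  Printf.printf "2 **@ 3 = %g,  2 /@ 6 = %g\n" (2. **@ 3.) (2. /@ 6.)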

\ No newline at end of file diff --git a/owl-base/Owl_operator/Make_Matrix/argument-1-M/index.html b/owl-base/Owl_operator/Make_Matrix/argument-1-M/index.html deleted file mode 100644 index 85ddcfb43..000000000 --- a/owl-base/Owl_operator/Make_Matrix/argument-1-M/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -M (owl-base.Owl_operator.Make_Matrix.M)

Parameter Make_Matrix.M

type ('a, 'b) t
val get : ('a, 'b) t -> int -> int -> 'a
val set : ('a, 'b) t -> int -> int -> 'a -> unit
val dot : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
\ No newline at end of file diff --git a/owl-base/Owl_operator/Make_Matrix/index.html b/owl-base/Owl_operator/Make_Matrix/index.html deleted file mode 100644 index e9765bfa1..000000000 --- a/owl-base/Owl_operator/Make_Matrix/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Make_Matrix (owl-base.Owl_operator.Make_Matrix)

Module Owl_operator.Make_Matrix

Parameters

Signature

val (*@) : ('a, 'b) M.t -> ('a, 'b) M.t -> ('a, 'b) M.t

Operator of dot a b, i.e. matrix multiplication a * b.

val (.%{}) : ('a, 'b) M.t -> (int * int) -> 'a
val (.%{;..}) : ('a, 'b) M.t -> int array -> 'a

Operator of get

val (.%{}<-) : ('a, 'b) M.t -> (int * int) -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) M.t -> int array -> 'a -> unit

Operator of set

\ No newline at end of file diff --git a/owl-base/Owl_operator/Make_Ndarray/argument-1-M/index.html b/owl-base/Owl_operator/Make_Ndarray/argument-1-M/index.html deleted file mode 100644 index 9c4c6df4d..000000000 --- a/owl-base/Owl_operator/Make_Ndarray/argument-1-M/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -M (owl-base.Owl_operator.Make_Ndarray.M)

Parameter Make_Ndarray.M

type ('a, 'b) t
val get : ('a, 'b) t -> int array -> 'a
val set : ('a, 'b) t -> int array -> 'a -> unit
\ No newline at end of file diff --git a/owl-base/Owl_operator/Make_Ndarray/index.html b/owl-base/Owl_operator/Make_Ndarray/index.html deleted file mode 100644 index 54ce7f64d..000000000 --- a/owl-base/Owl_operator/Make_Ndarray/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Make_Ndarray (owl-base.Owl_operator.Make_Ndarray)

Module Owl_operator.Make_Ndarray

Parameters

Signature

val (.%{}) : ('a, 'b) M.t -> int -> 'a
val (.%{;..}) : ('a, 'b) M.t -> int array -> 'a

Operator of get

val (.%{}<-) : ('a, 'b) M.t -> int -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) M.t -> int array -> 'a -> unit

Operator of set
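
The indexing operators above need only get and set from M, so a toy one-dimensional module suffices to show the a.%{i} syntax end to end. The module below is illustrative only and uses just the first index.

module ToyNd = struct
  type ('a, 'b) t = 'a array                (* 'b is a phantom parameter *)
  let get a idx = a.(idx.(0))
  let set a idx v = a.(idx.(0)) <- v
end

module NdOp = Owl_operator.Make_Ndarray (ToyNd)

let () =
  let open NdOp in
  let a = [| 10; 20; 30 |] in
  Printf.printf "a.%%{1} = %d\n" a.%{1};    (* desugars to get *)
  a.%{1} <- 21                              (* desugars to set *)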

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/.dummy b/owl-base/Owl_optimise_generic/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_optimise_generic/Make/Batch/index.html b/owl-base/Owl_optimise_generic/Make/Batch/index.html deleted file mode 100644 index 34a073074..000000000 --- a/owl-base/Owl_optimise_generic/Make/Batch/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Batch (owl-base.Owl_optimise_generic.Make.Batch)

Module Make.Batch

type typ =
  1. | Full
  2. | Mini of int
  3. | Sample of int
  4. | Stochastic
val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val batches : typ -> Algodiff.t -> int
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/Checkpoint/index.html b/owl-base/Owl_optimise_generic/Make/Checkpoint/index.html deleted file mode 100644 index c4d74b390..000000000 --- a/owl-base/Owl_optimise_generic/Make/Checkpoint/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Checkpoint (owl-base.Owl_optimise_generic.Make.Checkpoint)

Module Make.Checkpoint

type state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}
type typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of state -> unit
  4. | None
val init_state : int -> float -> state
val default_checkpoint_fun : (string -> 'a) -> 'b
val print_state_info : state -> unit
val print_summary : state -> unit
val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/Clipping/index.html b/owl-base/Owl_optimise_generic/Make/Clipping/index.html deleted file mode 100644 index 7dc5d33e1..000000000 --- a/owl-base/Owl_optimise_generic/Make/Clipping/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Clipping (owl-base.Owl_optimise_generic.Make.Clipping)

Module Make.Clipping

type typ =
  1. | L2norm of float
  2. | Value of float * float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/Gradient/index.html b/owl-base/Owl_optimise_generic/Make/Gradient/index.html deleted file mode 100644 index a20c73d96..000000000 --- a/owl-base/Owl_optimise_generic/Make/Gradient/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -Gradient (owl-base.Owl_optimise_generic.Make.Gradient)

Module Make.Gradient

type typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton
val run : typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/Learning_Rate/index.html b/owl-base/Owl_optimise_generic/Make/Learning_Rate/index.html deleted file mode 100644 index 9c21f0dc6..000000000 --- a/owl-base/Owl_optimise_generic/Make/Learning_Rate/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Learning_Rate (owl-base.Owl_optimise_generic.Make.Learning_Rate)

Module Make.Learning_Rate

type typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t
val default : typ -> typ
val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/Loss/index.html b/owl-base/Owl_optimise_generic/Make/Loss/index.html deleted file mode 100644 index cff1279cd..000000000 --- a/owl-base/Owl_optimise_generic/Make/Loss/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Loss (owl-base.Owl_optimise_generic.Make.Loss)

Module Make.Loss

type typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of Algodiff.t -> Algodiff.t -> Algodiff.t
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/Momentum/index.html b/owl-base/Owl_optimise_generic/Make/Momentum/index.html deleted file mode 100644 index 7a7169fd0..000000000 --- a/owl-base/Owl_optimise_generic/Make/Momentum/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Momentum (owl-base.Owl_optimise_generic.Make.Momentum)

Module Make.Momentum

type typ =
  1. | Standard of float
  2. | Nesterov of float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/Params/index.html b/owl-base/Owl_optimise_generic/Make/Params/index.html deleted file mode 100644 index 0cfd71862..000000000 --- a/owl-base/Owl_optimise_generic/Make/Params/index.html +++ /dev/null @@ -1,14 +0,0 @@ - -Params (owl-base.Owl_optimise_generic.Make.Params)

Module Make.Params

type typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}
val default : unit -> typ
val config :
  ?batch:Batch.typ ->
  ?gradient:Gradient.typ ->
  ?loss:Loss.typ ->
  ?learning_rate:Learning_Rate.typ ->
  ?regularisation:Regularisation.typ ->
  ?momentum:Momentum.typ ->
  ?clipping:Clipping.typ ->
  ?stopping:Stopping.typ ->
  ?checkpoint:Checkpoint.typ ->
  ?verbosity:bool ->
  float ->
  typ
val to_string : typ -> string
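
An illustrative configuration built with config (the trailing float is the number of epochs). The instantiation of AD below is an assumption rather than part of this page: any Algodiff module accepted by Owl_optimise_generic.Make works, and in owl-base one is usually obtained from Owl_algodiff_generic.Make over a float64 primal-ops module.

module AD = Owl_algodiff_generic.Make (Owl_base_algodiff_primal_ops.D)
module O = Owl_optimise_generic.Make (AD)

let params =
  O.Params.config
    ~batch:(O.Batch.Mini 128)
    ~loss:O.Loss.Cross_entropy
    ~learning_rate:(O.Learning_Rate.Adam (0.001, 0.9, 0.999))
    ~momentum:(O.Momentum.Standard 0.9)
    ~stopping:(O.Stopping.Const 1e-4)
    ~verbosity:true
    50.                                     (* number of epochs *)

let () = print_endline (O.Params.to_string params)
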
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/Regularisation/index.html b/owl-base/Owl_optimise_generic/Make/Regularisation/index.html deleted file mode 100644 index 58e81bbc8..000000000 --- a/owl-base/Owl_optimise_generic/Make/Regularisation/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Regularisation (owl-base.Owl_optimise_generic.Make.Regularisation)

Module Make.Regularisation

type typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None
val run : typ -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/Stopping/index.html b/owl-base/Owl_optimise_generic/Make/Stopping/index.html deleted file mode 100644 index 634e20ce3..000000000 --- a/owl-base/Owl_optimise_generic/Make/Stopping/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Stopping (owl-base.Owl_optimise_generic.Make.Stopping)

Module Make.Stopping

type typ =
  1. | Const of float
  2. | Early of int * int
  3. | None
val run : typ -> float -> bool
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/Utils/index.html b/owl-base/Owl_optimise_generic/Make/Utils/index.html deleted file mode 100644 index 8c37c3dd2..000000000 --- a/owl-base/Owl_optimise_generic/Make/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl-base.Owl_optimise_generic.Make.Utils)

Module Make.Utils

val sample_num : Algodiff.t -> int
val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val get_chunk : Algodiff.t -> Algodiff.t -> int -> int -> Algodiff.t * Algodiff.t
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/A/Linalg/index.html b/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/A/Linalg/index.html deleted file mode 100644 index f40a7aa3b..000000000 --- a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_optimise_generic.Make.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/A/Mat/index.html b/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/A/Mat/index.html deleted file mode 100644 index eb177c987..000000000 --- a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_optimise_generic.Make.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/A/Scalar/index.html b/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/A/Scalar/index.html deleted file mode 100644 index 886763be2..000000000 --- a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_optimise_generic.Make.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/A/index.html b/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/A/index.html deleted file mode 100644 index 406dca810..000000000 --- a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_optimise_generic.Make.Algodiff.A)

Module Algodiff.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Arr/index.html b/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Arr/index.html deleted file mode 100644 index 33d5d99e5..000000000 --- a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl-base.Owl_optimise_generic.Make.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/index.html b/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/index.html deleted file mode 100644 index 0b4e115da..000000000 --- a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl-base.Owl_optimise_generic.Make.Algodiff.Builder)

Module Algodiff.Builder

Ops Builder
module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t

build single input single output operations

module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t

build single input pair outputs operations

module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t

build single input triple outputs operations

module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array

build single input array output operations

module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t

build pair inputs single output operations

module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t

build array input single output operations
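
What the builder is for: registering a new differentiable function by supplying its forward rule plus forward- and reverse-mode derivative rules. The sketch below defines a hypothetical cube operation through build_siso, encoding d/dx x^3 = 3 x^2; the instantiation of AD is an assumption (any module with this Algodiff signature works), and pack_elt / pack_arr / pack_flt / primal are the standard Algodiff packing and primal helpers.

module AD = Owl_algodiff_generic.Make (Owl_base_algodiff_primal_ops.D)

let cube =
  AD.Builder.build_siso
    (module struct
      let label = "cube"
      let ff_f a = AD.pack_elt AD.A.Scalar.(mul a (mul a a))   (* scalar forward  *)
      let ff_arr a = AD.pack_arr AD.A.(mul a (mul a a))        (* ndarray forward *)
      let df _cp ap at = AD.Maths.(at * (AD.pack_flt 3. * ap * ap))
      let dr a _cp ca = AD.Maths.(!ca * (AD.pack_flt 3. * AD.primal a * AD.primal a))
    end : AD.Builder.Siso)

let () = Printf.printf "cube 2 = %g\n" (AD.unpack_flt (cube (AD.pack_flt 2.)))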

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Aiso/index.html b/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Aiso/index.html deleted file mode 100644 index 784469c87..000000000 --- a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl-base.Owl_optimise_generic.Make.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Piso/index.html b/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Piso/index.html deleted file mode 100644 index cf8e2dfa9..000000000 --- a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl-base.Owl_optimise_generic.Make.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Siao/index.html b/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Siao/index.html deleted file mode 100644 index fda2c757d..000000000 --- a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl-base.Owl_optimise_generic.Make.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Sipo/index.html b/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Sipo/index.html deleted file mode 100644 index 5c4822e17..000000000 --- a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl-base.Owl_optimise_generic.Make.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Siso/index.html b/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Siso/index.html deleted file mode 100644 index dbda5bdad..000000000 --- a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl-base.Owl_optimise_generic.Make.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Sito/index.html b/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Sito/index.html deleted file mode 100644 index f4351d19d..000000000 --- a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl-base.Owl_optimise_generic.Make.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Linalg/index.html b/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Linalg/index.html deleted file mode 100644 index 2f0fa9012..000000000 --- a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_optimise_generic.Make.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val logdet : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val chol : ?upper:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val qr : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val lq : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val svd : ?thin:bool -> t -> t * t * t

Refer to :doc:`owl_dense_ndarray_generic`

val sylvester : t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val lyapunov : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val care : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dare : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Mat/index.html b/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Mat/index.html deleted file mode 100644 index 08747a1a0..000000000 --- a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_optimise_generic.Make.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Maths/index.html b/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Maths/index.html deleted file mode 100644 index 8b068fcbf..000000000 --- a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl-base.Owl_optimise_generic.Make.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (-) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (**) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val add : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sub : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mul : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val div : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val kron : t -> t -> t

Refer to :doc:`owl_dense_matrix_generic`

val dot : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pow : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val min2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cross_entropy : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val neg : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val abs : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val signum : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val floor : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val ceil : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val round : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqr : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqrt : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log2 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log10 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val exp : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum_reduce : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mean : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val swap : int -> int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l1norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm_sqr' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sigmoid : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val relu : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dawsn : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softplus : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softsign : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softmax : ?axis:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val reshape : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val flatten : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_item : t -> int -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_row : t -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val concat : axis:int -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val split : axis:int -> int array -> t -> t array

Refer to :doc:`owl_dense_ndarray_generic`

val of_arrays : t array array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val to_arrays : t -> t array array

Refer to :doc:`owl_dense_ndarray_generic`

val concatenate : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val stack : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_slice : int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_slice : int list list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_fancy : Owl_types.index list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_fancy : Owl_types.index list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diag : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diagm : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val trace : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val triu : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tril : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/NN/index.html b/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/NN/index.html deleted file mode 100644 index 73613537e..000000000 --- a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl-base.Owl_optimise_generic.Make.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val upsampling2d : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pad : ?v:A.elt -> int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`
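As an illustration of the convolution and pooling operators above, here is a hedged sketch of a small forward pass. AD is assumed to be an instantiation of this module (e.g. Owl.Algodiff.D from the main owl package); the layout is NHWC, with input [|batch; h; w; channels|] and kernel [|kh; kw; in_channels; out_channels|].

(* Sketch of a small forward pass through Algodiff.NN; AD = Owl.Algodiff.D is an assumption. *)
module AD = Owl.Algodiff.D

let x = AD.pack_arr (AD.A.uniform [| 1; 28; 28; 1 |])   (* one 28x28 single-channel image *)
let k = AD.pack_arr (AD.A.uniform [| 3; 3; 1; 8 |])     (* 3x3 kernel, 1 -> 8 channels    *)

let y =
  AD.NN.conv2d ~padding:Owl_types.SAME x k [| 1; 1 |]                      (* stride 1x1          *)
  |> fun h -> AD.NN.max_pool2d Owl_types.VALID h [| 2; 2 |] [| 2; 2 |]     (* 2x2 pool, stride 2  *)

let () =
  AD.shape y |> Array.to_list |> List.map string_of_int
  |> String.concat "x" |> print_endline                  (* expected: 1x14x14x8 *)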

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/index.html b/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/index.html deleted file mode 100644 index b6d8d152b..000000000 --- a/owl-base/Owl_optimise_generic/Make/argument-1-Algodiff/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Algodiff (owl-base.Owl_optimise_generic.Make.Algodiff)

Parameter Make.Algodiff

include Owl_algodiff_core_sig.Sig
Type definition
include Owl_algodiff_types_sig.Sig with type elt := A.elt and type arr := A.arr
type t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
Core functions
val tag : unit -> int

TODO

val primal : t -> t

TODO

val primal' : t -> t

TODO

val zero : t -> t

TODO

val reset_zero : t -> t

TODO

val tangent : t -> t

TODO

val adjref : t -> t Stdlib.ref

TODO

val adjval : t -> t

TODO

val shape : t -> int array

TODO

val is_float : t -> bool

TODO

val is_arr : t -> bool

TODO

val row_num : t -> int

number of rows

val col_num : t -> int

number of columns

val numel : t -> int

number of elements

val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t

other functions, without tracking gradient

val clip_by_l2norm : A.elt -> t -> t

other functions, without tracking gradient

val copy_primal' : t -> t

TODO

val tile : t -> int array -> t

TODO

val repeat : t -> int array -> t

TODO

val pack_elt : A.elt -> t

convert from elt type to t type.

val unpack_elt : t -> A.elt

convert from t type to elt type.

val pack_flt : float -> t

convert from float type to t type.

val _f : float -> t

A shortcut function for F A.(float_to_elt x).

val unpack_flt : t -> float

convert from t type to float type.

val pack_arr : A.arr -> t

convert from arr type to t type.

val unpack_arr : t -> A.arr

convert from t type to arr type.

val deep_info : t -> string

TODO

val type_info : t -> string

TODO

val error_binop : string -> t -> t -> 'a

TODO

val error_uniop : string -> t -> 'a

TODO

val make_forward : t -> t -> int -> t

TODO

val make_reverse : t -> int -> t

TODO

val reverse_prop : t -> t -> unit

TODO

val diff : (t -> t) -> t -> t

diff f x returns the exact derivative of a function f : scalar -> scalar at point x. Calling diff f alone returns its derivative function g of the same type, i.e. g : scalar -> scalar.

Applying diff repeatedly gives higher-order derivatives of f, i.e. f |> diff |> diff |> diff |> ...
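For instance, a minimal sketch, assuming this functor is instantiated as AD (e.g. Owl.Algodiff.D from the main owl package, an assumption not stated on this page):

(* Minimal sketch, assuming AD = Owl.Algodiff.D or any instantiation of this module. *)
module AD = Owl.Algodiff.D

let f x = AD.Maths.(x * sin x)            (* f(x)  = x sin x                *)
let f'  = AD.diff f                       (* f'(x) = sin x + x cos x        *)
let f'' = AD.(f |> diff |> diff)          (* higher order: apply diff twice *)

let () =
  let x = AD.pack_flt 1.0 in
  Printf.printf "f'(1)=%g f''(1)=%g\n"
    AD.(unpack_flt (f' x)) AD.(unpack_flt (f'' x))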

val diff' : (t -> t) -> t -> t * t

similar to diff, but return (f x, diff f x).

val grad : (t -> t) -> t -> t

gradient of f : (vector -> scalar) at x, reverse ad.
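For example, a hedged sketch of grad on a vector-to-scalar function, again assuming AD = Owl.Algodiff.D:

(* Sketch: the gradient of f(x) = sum_i x_i^2 is 2x; AD = Owl.Algodiff.D is an assumption. *)
module AD = Owl.Algodiff.D

let f x = AD.Maths.(sum' (sqr x))

let () =
  let x = AD.pack_arr (AD.A.uniform [| 1; 3 |]) in   (* a 1x3 row vector *)
  AD.A.print (AD.unpack_arr (AD.grad f x))           (* prints 2x        *)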

val grad' : (t -> t) -> t -> t * t

similar to grad, but return (f x, grad f x).

val jacobian : (t -> t) -> t -> t

jacobian of f : (vector -> vector) at x, both x and y are row vectors.

val jacobian' : (t -> t) -> t -> t * t

similar to jacobian, but return (f x, jacobian f x)

val jacobianv : (t -> t) -> t -> t -> t

jacobian vector product of f : (vector -> vector) at x along v, forward ad. Namely, it calculates (jacobian f x) v

val jacobianv' : (t -> t) -> t -> t -> t * t

similar to jacobianv, but returns (f x, jacobianv f x v)

val jacobianTv : (t -> t) -> t -> t -> t

transposed jacobian vector product of f : (vector -> vector) at x along v, backward ad. Namely, it calculates (transpose (jacobian f x)) v.

val jacobianTv' : (t -> t) -> t -> t -> t * t

similar to jacobianTv, but returns (f x, jacobianTv f x v)

val hessian : (t -> t) -> t -> t

hessian of f : (scalar -> scalar) at x.

val hessian' : (t -> t) -> t -> t * t

similar to hessian, but returns (f x, hessian f x)

val hessianv : (t -> t) -> t -> t -> t

hessian vector product of f : (scalar -> scalar) at x along v. Namely, it calculates (hessian f x) v.

val hessianv' : (t -> t) -> t -> t -> t * t

similar to hessianv, but return (f x, hessianv f x v).

val laplacian : (t -> t) -> t -> t

laplacian of f : (scalar -> scalar) at x.

val laplacian' : (t -> t) -> t -> t * t

similar to laplacian, but return (f x, laplacian f x).

val gradhessian : (t -> t) -> t -> t * t

return (grad f x, hessian f x), f : (scalar -> scalar)

val gradhessian' : (t -> t) -> t -> t * t * t

return (f x, grad f x, hessian f x)

val gradhessianv : (t -> t) -> t -> t -> t * t

return (grad f x v, hessian f x v)

val gradhessianv' : (t -> t) -> t -> t -> t * t * t

return (f x, grad f x v, hessian f x v)

include Owl_algodiff_ops_sig.Sig with type t := t and type elt := A.elt and type arr := A.arr and type op := op
module Builder : Owl_algodiff_ops_builder_sig.Sig with type t := t and type elt := A.elt and type arr := A.arr and type op := op
Supported Maths functions
module Maths : sig ... end
Supported Linalg functions
module Linalg : sig ... end
Supported Neural Network functions
module NN : sig ... end
Supported Mat functions
module Mat : sig ... end
Supported Arr functions
module Arr : sig ... end
Helper functions
include Owl_algodiff_graph_convert_sig.Sig with type t := t
val to_trace : t list -> string

to_trace [t0; t1; ...] outputs the trace of the computation graph to the terminal in a human-readable format.

val to_dot : t list -> string

to_dot [t0; t1; ...] outputs the computation graph in the dot file format, which can be passed to other tools such as Graphviz for further visualisation.
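A short hedged sketch of exporting a graph, assuming AD = Owl.Algodiff.D; the reverse pass below creates the DR nodes that make the graph worth plotting.

(* Sketch: write the computation graph of one reverse pass to graph.dot. *)
module AD = Owl.Algodiff.D

let () =
  let f x = AD.Maths.(sum' (x * sin x)) in
  let x = AD.make_reverse (AD.pack_arr (AD.A.ones [| 3 |])) (AD.tag ()) in
  let y = f x in
  AD.reverse_prop (AD.pack_flt 1.) y;
  let oc = open_out "graph.dot" in
  output_string oc (AD.to_dot [ y ]);
  close_out oc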

val pp_num : Stdlib.Format.formatter -> t -> unit

pp_num t pretty prints the abstract number used in Algodiff.

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic/Make/index.html b/owl-base/Owl_optimise_generic/Make/index.html deleted file mode 100644 index 247350b14..000000000 --- a/owl-base/Owl_optimise_generic/Make/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -Make (owl-base.Owl_optimise_generic.Make)

Module Owl_optimise_generic.Make

Parameters

Signature

module Algodiff = Algodiff
module Utils : sig ... end
module Learning_Rate : sig ... end
module Batch : sig ... end
module Loss : sig ... end
module Gradient : sig ... end
module Momentum : sig ... end
module Regularisation : sig ... end
module Clipping : sig ... end
module Stopping : sig ... end
module Checkpoint : sig ... end
module Params : sig ... end
val minimise_weight : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> (Algodiff.t array array -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
val minimise_fun : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_compiled_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> (unit -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
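A hedged end-to-end sketch of minimise_fun follows. The instantiation of this functor with Owl.Algodiff.D and the existence of Params.default () are assumptions borrowed from the main owl package, not guaranteed by this signature.

(* Sketch: minimise f(x) = ||x - 3||^2 with the generic optimiser.
   Assumptions: Owl.Algodiff.D satisfies the Algodiff parameter, and
   Params.default () builds a default parameter record as in mainline Owl. *)
module AD = Owl.Algodiff.D
module O = Owl_optimise_generic.Make (AD)

let () =
  let f x = AD.Maths.(l2norm_sqr' (x - AD.pack_flt 3.)) in
  let x0 = AD.pack_arr (AD.A.zeros [| 1; 5 |]) in
  let params = O.Params.default () in
  let _state, x_min = O.minimise_fun params f x0 in
  AD.A.print (AD.unpack_arr x_min)       (* entries should approach 3. *)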
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/.dummy b/owl-base/Owl_optimise_generic_sig/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/A/Linalg/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/A/Linalg/index.html deleted file mode 100644 index bf4fac49c..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_optimise_generic_sig.Sig.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/A/Mat/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/A/Mat/index.html deleted file mode 100644 index 1f2d6e889..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_optimise_generic_sig.Sig.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/A/Scalar/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/A/Scalar/index.html deleted file mode 100644 index 1c591dc7c..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_optimise_generic_sig.Sig.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/A/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/A/index.html deleted file mode 100644 index 9665bec63..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_optimise_generic_sig.Sig.Algodiff.A)

Module Algodiff.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Arr/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Arr/index.html deleted file mode 100644 index 1f4215a0c..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl-base.Owl_optimise_generic_sig.Sig.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/index.html deleted file mode 100644 index 7e0d82bbe..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl-base.Owl_optimise_generic_sig.Sig.Algodiff.Builder)

Module Algodiff.Builder

Ops Builder
module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t

build single input single output operations

module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t

build single input pair outputs operations

module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t

build single input triple outputs operations

module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array

build single input array output operations

module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t

build pair inputs single output operations

module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t

build array input single output operations

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Aiso/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Aiso/index.html deleted file mode 100644 index 29cda5594..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl-base.Owl_optimise_generic_sig.Sig.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Piso/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Piso/index.html deleted file mode 100644 index f84af55f1..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl-base.Owl_optimise_generic_sig.Sig.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Siao/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Siao/index.html deleted file mode 100644 index 7c65a4418..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl-base.Owl_optimise_generic_sig.Sig.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Sipo/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Sipo/index.html deleted file mode 100644 index 5cfdb7e9f..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl-base.Owl_optimise_generic_sig.Sig.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Siso/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Siso/index.html deleted file mode 100644 index 8d974b323..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl-base.Owl_optimise_generic_sig.Sig.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Sito/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Sito/index.html deleted file mode 100644 index 8960fdcd9..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl-base.Owl_optimise_generic_sig.Sig.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Linalg/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Linalg/index.html deleted file mode 100644 index ca1700d2c..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_optimise_generic_sig.Sig.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val logdet : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val chol : ?upper:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val qr : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val lq : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val svd : ?thin:bool -> t -> t * t * t

Refer to :doc:`owl_dense_ndarray_generic`

val sylvester : t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val lyapunov : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val care : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dare : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Mat/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Mat/index.html deleted file mode 100644 index 1432c291c..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_optimise_generic_sig.Sig.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Maths/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Maths/index.html deleted file mode 100644 index 3cd9deb56..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl-base.Owl_optimise_generic_sig.Sig.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (-) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (**) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val add : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sub : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mul : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val div : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val kron : t -> t -> t

Refer to :doc:`owl_dense_matrix_generic`

val dot : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pow : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val min2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cross_entropy : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val neg : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val abs : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val signum : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val floor : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val ceil : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val round : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqr : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqrt : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log2 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log10 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val exp : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum_reduce : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mean : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val swap : int -> int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l1norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm_sqr' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sigmoid : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val relu : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dawsn : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softplus : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softsign : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softmax : ?axis:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val reshape : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val flatten : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_item : t -> int -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_row : t -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val concat : axis:int -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val split : axis:int -> int array -> t -> t array

Refer to :doc:`owl_dense_ndarray_generic`

val of_arrays : t array array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val to_arrays : t -> t array array

Refer to :doc:`owl_dense_ndarray_generic`

val concatenate : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val stack : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_slice : int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_slice : int list list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_fancy : Owl_types.index list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_fancy : Owl_types.index list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diag : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diagm : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val trace : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val triu : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tril : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/NN/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/NN/index.html deleted file mode 100644 index 7b18f2f63..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl-base.Owl_optimise_generic_sig.Sig.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val upsampling2d : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pad : ?v:A.elt -> int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/index.html deleted file mode 100644 index 1b47d94b3..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Algodiff/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Algodiff (owl-base.Owl_optimise_generic_sig.Sig.Algodiff)

Module Sig.Algodiff

include Owl_algodiff_core_sig.Sig
Type definition
include Owl_algodiff_types_sig.Sig with type elt := A.elt and type arr := A.arr
type t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
Core functions
val tag : unit -> int

TODO

val primal : t -> t

TODO

val primal' : t -> t

TODO

val zero : t -> t

TODO

val reset_zero : t -> t

TODO

val tangent : t -> t

TODO

val adjref : t -> t Stdlib.ref

TODO

val adjval : t -> t

TODO

val shape : t -> int array

TODO

val is_float : t -> bool

TODO

val is_arr : t -> bool

TODO

val row_num : t -> int

number of rows

val col_num : t -> int

number of columns

val numel : t -> int

number of elements

val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t

other functions, without tracking gradient

val clip_by_l2norm : A.elt -> t -> t

other functions, without tracking gradient

val copy_primal' : t -> t

TODO

val tile : t -> int array -> t

TODO

val repeat : t -> int array -> t

TODO

val pack_elt : A.elt -> t

convert from elt type to t type.

val unpack_elt : t -> A.elt

convert from t type to elt type.

val pack_flt : float -> t

convert from float type to t type.

val _f : float -> t

A shortcut function for F A.(float_to_elt x).

val unpack_flt : t -> float

convert from t type to float type.

val pack_arr : A.arr -> t

convert from arr type to t type.

val unpack_arr : t -> A.arr

convert from t type to arr type.

val deep_info : t -> string

TODO

val type_info : t -> string

TODO

val error_binop : string -> t -> t -> 'a

TODO

val error_uniop : string -> t -> 'a

TODO

val make_forward : t -> t -> int -> t

TODO

val make_reverse : t -> int -> t

TODO

val reverse_prop : t -> t -> unit

TODO

val diff : (t -> t) -> t -> t

diff f x returns the exact derivative of a function f : scalar -> scalar at point x. Calling diff f alone returns its derivative function g of the same type, i.e. g : scalar -> scalar.

Applying diff repeatedly gives higher-order derivatives of f, i.e. f |> diff |> diff |> diff |> ...

val diff' : (t -> t) -> t -> t * t

similar to diff, but return (f x, diff f x).

val grad : (t -> t) -> t -> t

gradient of f : (vector -> scalar) at x, reverse ad.

val grad' : (t -> t) -> t -> t * t

similar to grad, but return (f x, grad f x).

val jacobian : (t -> t) -> t -> t

jacobian of f : (vector -> vector) at x, both x and y are row vectors.

val jacobian' : (t -> t) -> t -> t * t

similar to jacobian, but return (f x, jacobian f x)

val jacobianv : (t -> t) -> t -> t -> t

jacobian vector product of f : (vector -> vector) at x along v, forward ad. Namely, it calculates (jacobian f x) v

val jacobianv' : (t -> t) -> t -> t -> t * t

similar to jacobianv, but returns (f x, jacobianv f x v)

val jacobianTv : (t -> t) -> t -> t -> t

transposed jacobian vector product of f : (vector -> vector) at x along v, backward ad. Namely, it calculates (transpose (jacobian f x)) v.

val jacobianTv' : (t -> t) -> t -> t -> t * t

similar to jacobianTv, but returns (f x, jacobianTv f x v)
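A hedged sketch contrasting the forward-mode and reverse-mode products above, assuming AD = Owl.Algodiff.D (an instantiation not defined on this page):

(* Sketch: JVP vs transposed-JVP (VJP) for a vector -> vector function. *)
module AD = Owl.Algodiff.D

let w = AD.pack_arr (AD.A.gaussian [| 3; 3 |])
let f x = AD.Maths.(tanh (x *@ w))          (* row vector in, row vector out *)

let x = AD.pack_arr (AD.A.ones [| 1; 3 |])
let v = AD.pack_arr (AD.A.ones [| 1; 3 |])

let jv  = AD.jacobianv  f x v               (* (jacobian f x) applied to v, forward mode           *)
let jtv = AD.jacobianTv f x v               (* transpose (jacobian f x) applied to v, reverse mode *)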

val hessian : (t -> t) -> t -> t

hessian of f : (scalar -> scalar) at x.

val hessian' : (t -> t) -> t -> t * t

similar to hessian, but returns (f x, hessian f x)

val hessianv : (t -> t) -> t -> t -> t

hessian vector product of f : (scalar -> scalar) at x along v. Namely, it calculates (hessian f x) v.

val hessianv' : (t -> t) -> t -> t -> t * t

similar to hessianv, but return (f x, hessianv f x v).

val laplacian : (t -> t) -> t -> t

laplacian of f : (scalar -> scalar) at x.

val laplacian' : (t -> t) -> t -> t * t

similar to laplacian, but return (f x, laplacian f x).

val gradhessian : (t -> t) -> t -> t * t

return (grad f x, hessian f x), f : (scalar -> scalar)

val gradhessian' : (t -> t) -> t -> t * t * t

return (f x, grad f x, hessian f x)

val gradhessianv : (t -> t) -> t -> t -> t * t

return (grad f x v, hessian f x v)

val gradhessianv' : (t -> t) -> t -> t -> t * t * t

return (f x, grad f x v, hessian f x v)

include Owl_algodiff_ops_sig.Sig with type t := t and type elt := A.elt and type arr := A.arr and type op := op
module Builder : Owl_algodiff_ops_builder_sig.Sig with type t := t and type elt := A.elt and type arr := A.arr and type op := op
Supported Maths functions
module Maths : sig ... end
Supported Linalg functions
module Linalg : sig ... end
Supported Neural Network functions
module NN : sig ... end
Supported Mat functions
module Mat : sig ... end
Supported Arr functions
module Arr : sig ... end
Helper functions
include Owl_algodiff_graph_convert_sig.Sig with type t := t
val to_trace : t list -> string

to_trace [t0; t1; ...] outputs the trace of the computation graph to the terminal in a human-readable format.

val to_dot : t list -> string

to_dot [t0; t1; ...] outputs the computation graph in the dot file format, which can be passed to other tools such as Graphviz for further visualisation.

val pp_num : Stdlib.Format.formatter -> t -> unit

pp_num t pretty prints the abstract number used in Algodiff.

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Batch/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Batch/index.html deleted file mode 100644 index 744e96490..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Batch/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Batch (owl-base.Owl_optimise_generic_sig.Sig.Batch)

Module Sig.Batch

Batch module

type typ =
  1. | Full
  2. | Mini of int
  3. | Sample of int
  4. | Stochastic

Types of batches.

val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t

Execute the computations defined in module typ.

val batches : typ -> Algodiff.t -> int

Return the total number of batches given a batch typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Checkpoint/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Checkpoint/index.html deleted file mode 100644 index 76c6d9746..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Checkpoint/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Checkpoint (owl-base.Owl_optimise_generic_sig.Sig.Checkpoint)

Module Sig.Checkpoint

Checkpoint module

type state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}

Type definition of checkpoint

type typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of (state -> unit)
  4. | None

Types of checkpoint strategies.

val init_state : int -> float -> state

init_state batches_per_epoch epochs initialises a state by specifying the number of batches per epoch and the number of epochs in total.

val default_checkpoint_fun : (string -> 'a) -> 'a

This function is used for saving intermediate files during optimisation.

val print_state_info : state -> unit

Print out the detailed information of the current state.

val print_summary : state -> unit

Print out the summary of the current state.

val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Clipping/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Clipping/index.html deleted file mode 100644 index 4bde3e912..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Clipping/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Clipping (owl-base.Owl_optimise_generic_sig.Sig.Clipping)

Module Sig.Clipping

Clipping module

type typ =
  1. | L2norm of float
  2. | Value of float * float
  3. | None

Types of clipping functions.

val run : typ -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Gradient/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Gradient/index.html deleted file mode 100644 index bda2a183f..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Gradient/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -Gradient (owl-base.Owl_optimise_generic_sig.Sig.Gradient)

Module Sig.Gradient

Gradient module

type typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton

Types of gradient function.

val run : typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Learning_Rate/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Learning_Rate/index.html deleted file mode 100644 index 1eae2fe53..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Learning_Rate/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Learning_Rate (owl-base.Owl_optimise_generic_sig.Sig.Learning_Rate)

Module Sig.Learning_Rate

Strategies for learning rate update

type typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array

Representation of learning rate update strategies. Possible values include:

  • Adam (alpha, beta1, beta2), see ref for parameter meaning
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array

Update the cache of gradients.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Loss/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Loss/index.html deleted file mode 100644 index 81303e857..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Loss/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Loss (owl-base.Owl_optimise_generic_sig.Sig.Loss)

Module Sig.Loss

Loss module

type typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of (Algodiff.t -> Algodiff.t -> Algodiff.t)

Types of loss functions.
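
As a hedged sketch, a hand-written loss can be plugged in through the Custom constructor; the pseudo-Huber form below is only an illustration and assumes the enclosing Algodiff instance is in scope:

  (* pseudo-Huber loss between prediction y' and target y *)
  let pseudo_huber y y' =
    let open Algodiff in
    Maths.(sum' (sqrt (pack_flt 1. + sqr (y - y')) - pack_flt 1.))

  let loss = Loss.Custom pseudo_huber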

val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Momentum/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Momentum/index.html deleted file mode 100644 index c20cee57a..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Momentum/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Momentum (owl-base.Owl_optimise_generic_sig.Sig.Momentum)

Module Sig.Momentum

Momentum module

type typ =
  1. | Standard of float
  2. | Nesterov of float
  3. | None

Types of momentum functions.

val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Params/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Params/index.html deleted file mode 100644 index e0f339dea..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Params/index.html +++ /dev/null @@ -1,14 +0,0 @@ - -Params (owl-base.Owl_optimise_generic_sig.Sig.Params)

Module Sig.Params

Params module

type typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}

Type definition of parameter.

val default : unit -> typ

Create module typ with default values.

val config : ?batch:Batch.typ -> ?gradient:Gradient.typ -> ?loss:Loss.typ -> ?learning_rate:Learning_Rate.typ -> ?regularisation:Regularisation.typ -> ?momentum:Momentum.typ -> ?clipping:Clipping.typ -> ?stopping:Stopping.typ -> ?checkpoint:Checkpoint.typ -> ?verbosity:bool -> float -> typ

This function creates a parameter object with many configurations.
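
For instance, a typical configuration can be put together as in the hedged sketch below; the concrete values are arbitrary and only illustrate the optional arguments:

  let params =
    Params.config
      ~batch:(Batch.Mini 100)
      ~learning_rate:(Learning_Rate.Adagrad 0.005)
      ~loss:Loss.Cross_entropy
      ~stopping:(Stopping.Const 1e-6)
      10.                               (* train for 10 epochs *)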

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Regularisation/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Regularisation/index.html deleted file mode 100644 index 17b9f18ba..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Regularisation/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Regularisation (owl-base.Owl_optimise_generic_sig.Sig.Regularisation)

Module Sig.Regularisation

Regularisation module

type typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None

Types of regularisation functions.

val run : typ -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Stopping/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Stopping/index.html deleted file mode 100644 index 51be6006d..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Stopping/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Stopping (owl-base.Owl_optimise_generic_sig.Sig.Stopping)

Module Sig.Stopping

Stopping module

type typ =
  1. | Const of float
  2. | Early of int * int
  3. | None

Types of stopping functions.

val run : typ -> float -> bool

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Utils/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/Utils/index.html deleted file mode 100644 index 5089bd5b4..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl-base.Owl_optimise_generic_sig.Sig.Utils)

Module Sig.Utils

Utils module

val sample_num : Algodiff.t -> int

Return the total number of samples in the passed-in ndarray.

val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t

draw_samples x y draws samples from both x (observations) and y (labels). The samples are drawn along axis 0, so x and y must agree along axis 0.

val get_chunk : Algodiff.t -> Algodiff.t -> int -> int -> Algodiff.t * Algodiff.t

get_chunk x y i c gets a contiguous chunk of c samples starting at position i from x (observations) and y (labels).

\ No newline at end of file diff --git a/owl-base/Owl_optimise_generic_sig/module-type-Sig/index.html b/owl-base/Owl_optimise_generic_sig/module-type-Sig/index.html deleted file mode 100644 index 9367c4860..000000000 --- a/owl-base/Owl_optimise_generic_sig/module-type-Sig/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -Sig (owl-base.Owl_optimise_generic_sig.Sig)

Module type Owl_optimise_generic_sig.Sig

module Utils : sig ... end

Utils module

module Learning_Rate : sig ... end

Strategies for learning rate update

module Batch : sig ... end

Batch module

module Loss : sig ... end

Loss module

module Gradient : sig ... end

Gradient module

module Momentum : sig ... end

Momentum module

module Regularisation : sig ... end

Regularisation module

module Clipping : sig ... end

Clipping module

module Stopping : sig ... end

Stopping module

module Checkpoint : sig ... end

Checkpoint module

module Params : sig ... end

Params module

Core functions
val minimise_weight : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Checkpoint.state * Algodiff.t

This function minimises the weight w of the passed-in function f.

  • f is a function f : w -> x -> y.
  • w is a row vector but y can have any shape.

val minimise_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> (Algodiff.t array array -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state

This function is specifically designed for minimising the weights in a neural network of graph structure. In Owl's earlier versions, the functions in the regression module were actually implemented using this function.

val minimise_fun : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Checkpoint.state * Algodiff.t

This function minimises f : x -> y w.r.t x.

x is an ndarray, and y is a scalar value.
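
A hedged sketch of minimise_fun on a toy objective; it assumes this signature is instantiated over Owl's double-precision dense ndarrays, with the Algodiff instance available under the name Algodiff as in the signatures above:

  (* f(x) = sum((x - 1)^2), minimised at x = 1 *)
  let f x =
    let open Algodiff in
    Maths.(sum' (sqr (x - pack_flt 1.)))

  let () =
    let x0 = Algodiff.(Owl.Mat.uniform 1 5 |> pack_arr) in
    let params = Params.config ~learning_rate:(Learning_Rate.Const 0.1) 100. in
    let _state, x_opt = minimise_fun params f x0 in
    Algodiff.unpack_arr x_opt |> Owl.Mat.print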

val minimise_compiled_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> (unit -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state

TODO

\ No newline at end of file diff --git a/owl-base/Owl_pretty/.dummy b/owl-base/Owl_pretty/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_types/.dummy b/owl-base/Owl_types/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_types/module-type-Computation_Device/A/Linalg/index.html b/owl-base/Owl_types/module-type-Computation_Device/A/Linalg/index.html deleted file mode 100644 index 668823841..000000000 --- a/owl-base/Owl_types/module-type-Computation_Device/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_types.Computation_Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_types/module-type-Computation_Device/A/Mat/index.html b/owl-base/Owl_types/module-type-Computation_Device/A/Mat/index.html deleted file mode 100644 index 31f713f00..000000000 --- a/owl-base/Owl_types/module-type-Computation_Device/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_types.Computation_Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_types/module-type-Computation_Device/A/Scalar/index.html b/owl-base/Owl_types/module-type-Computation_Device/A/Scalar/index.html deleted file mode 100644 index bc2be6c04..000000000 --- a/owl-base/Owl_types/module-type-Computation_Device/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_types.Computation_Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_types/module-type-Computation_Device/A/index.html b/owl-base/Owl_types/module-type-Computation_Device/A/index.html deleted file mode 100644 index aedf59e09..000000000 --- a/owl-base/Owl_types/module-type-Computation_Device/A/index.html +++ /dev/null @@ -1,379 +0,0 @@ - -A (owl-base.Owl_types.Computation_Device.A)

Module Computation_Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
\ No newline at end of file diff --git a/owl-base/Owl_types/module-type-Computation_Device/index.html b/owl-base/Owl_types/module-type-Computation_Device/index.html deleted file mode 100644 index 4bacd2607..000000000 --- a/owl-base/Owl_types/module-type-Computation_Device/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Computation_Device (owl-base.Owl_types.Computation_Device)

Module type Owl_types.Computation_Device

include Owl_types_computation_device.Sig
Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

\ No newline at end of file diff --git a/owl-base/Owl_types/module-type-Ndarray_Algodiff/Linalg/index.html b/owl-base/Owl_types/module-type-Ndarray_Algodiff/Linalg/index.html deleted file mode 100644 index 6682334e7..000000000 --- a/owl-base/Owl_types/module-type-Ndarray_Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_types.Ndarray_Algodiff.Linalg)

Module Ndarray_Algodiff.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_types/module-type-Ndarray_Algodiff/Mat/index.html b/owl-base/Owl_types/module-type-Ndarray_Algodiff/Mat/index.html deleted file mode 100644 index 8d1252a55..000000000 --- a/owl-base/Owl_types/module-type-Ndarray_Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_types.Ndarray_Algodiff.Mat)

Module Ndarray_Algodiff.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_types/module-type-Ndarray_Algodiff/Scalar/index.html b/owl-base/Owl_types/module-type-Ndarray_Algodiff/Scalar/index.html deleted file mode 100644 index 77f96bb7d..000000000 --- a/owl-base/Owl_types/module-type-Ndarray_Algodiff/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_types.Ndarray_Algodiff.Scalar)

Module Ndarray_Algodiff.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_types/module-type-Ndarray_Algodiff/index.html b/owl-base/Owl_types/module-type-Ndarray_Algodiff/index.html deleted file mode 100644 index c4ba30153..000000000 --- a/owl-base/Owl_types/module-type-Ndarray_Algodiff/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -Ndarray_Algodiff (owl-base.Owl_types.Ndarray_Algodiff)

Module type Owl_types.Ndarray_Algodiff

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_types/module-type-Ndarray_Basic/index.html b/owl-base/Owl_types/module-type-Ndarray_Basic/index.html deleted file mode 100644 index 5a7dfd6cf..000000000 --- a/owl-base/Owl_types/module-type-Ndarray_Basic/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -Ndarray_Basic (owl-base.Owl_types.Ndarray_Basic)

Module type Owl_types.Ndarray_Basic

include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
\ No newline at end of file diff --git a/owl-base/Owl_types/module-type-Ndarray_Compare/index.html b/owl-base/Owl_types/module-type-Ndarray_Compare/index.html deleted file mode 100644 index 39918e2c5..000000000 --- a/owl-base/Owl_types/module-type-Ndarray_Compare/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -Ndarray_Compare (owl-base.Owl_types.Ndarray_Compare)

Module type Owl_types.Ndarray_Compare

include Owl_types_ndarray_compare.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
val is_zero : arr -> bool
val is_positive : arr -> bool
val is_negative : arr -> bool
val is_nonpositive : arr -> bool
val is_nonnegative : arr -> bool
val equal : arr -> arr -> bool
val not_equal : arr -> arr -> bool
val less : arr -> arr -> bool
val greater : arr -> arr -> bool
val less_equal : arr -> arr -> bool
val greater_equal : arr -> arr -> bool
val approx_equal : ?eps:float -> arr -> arr -> bool
val approx_equal_scalar : ?eps:float -> arr -> elt -> bool
val approx_elt_equal : ?eps:float -> arr -> arr -> arr
val approx_elt_equal_scalar : ?eps:float -> arr -> elt -> arr
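Ndarray_Compare extends the basic signature with two flavours of comparison: element-wise predicates that return a 0/1 mask of the same shape (elt_less, elt_equal_scalar, approx_elt_equal, ...) and whole-array predicates that return a single bool (equal, less_equal, approx_equal ?eps, ...). A small sketch of the difference, again with Owl's Dense.Ndarray.D assumed as an implementation of this signature:

module N = Owl.Dense.Ndarray.D   (* assumed implementation of the signature *)

let () =
  let x = N.sequential [| 2; 3 |] in       (* 0. 1. 2. / 3. 4. 5. *)
  let y = N.add_scalar x 1e-7 in
  (* element-wise: a mask with 1. where the predicate holds *)
  N.print (N.elt_less_scalar x 3.);
  (* whole-array: a single bool, with and without a tolerance *)
  Printf.printf "equal: %b, approx_equal: %b\n"
    (N.equal x y)
    (N.approx_equal ~eps:1e-6 x y)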
\ No newline at end of file
diff --git a/owl-base/Owl_types/module-type-Ndarray_Mutable/Linalg/index.html b/owl-base/Owl_types/module-type-Ndarray_Mutable/Linalg/index.html
deleted file mode 100644
index 1e131b792..000000000
--- a/owl-base/Owl_types/module-type-Ndarray_Mutable/Linalg/index.html
+++ /dev/null
@@ -1,6 +0,0 @@
Linalg (owl-base.Owl_types.Ndarray_Mutable.Linalg)

Module Ndarray_Mutable.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
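The Linalg submodule gives the functors access to dense linear algebra over the same arr type. A short sketch of solving a linear system and checking the residual; Owl's Dense.Matrix.D and Linalg.D are assumed as the implementation here purely for illustration:

module M = Owl.Dense.Matrix.D    (* assumed arr implementation *)
module L = Owl.Linalg.D          (* assumed implementation of the Linalg signature *)

let () =
  let a = M.uniform 4 4 in
  let b = M.uniform 4 1 in
  let x = L.linsolve a b in      (* solve a * x = b *)
  Printf.printf "residual = %g\n" M.(l2norm' (sub (dot a x) b))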
\ No newline at end of file
diff --git a/owl-base/Owl_types/module-type-Ndarray_Mutable/Mat/index.html b/owl-base/Owl_types/module-type-Ndarray_Mutable/Mat/index.html
deleted file mode 100644
index 27f0da29a..000000000
--- a/owl-base/Owl_types/module-type-Ndarray_Mutable/Mat/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Mat (owl-base.Owl_types.Ndarray_Mutable.Mat)

Module Ndarray_Mutable.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
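The Mat submodule only adds a handful of matrix constructors and triangle extractors. A tiny sketch, with Owl's Dense.Matrix.D assumed as the implementation:

module M = Owl.Dense.Matrix.D    (* assumed implementation *)

let () =
  let u = M.triu ~k:1 (M.uniform 3 3) in   (* strictly upper-triangular part *)
  M.print (M.add (M.eye 3) u)              (* identity plus the upper triangle *)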
\ No newline at end of file
diff --git a/owl-base/Owl_types/module-type-Ndarray_Mutable/Scalar/index.html b/owl-base/Owl_types/module-type-Ndarray_Mutable/Scalar/index.html
deleted file mode 100644
index cde4849ab..000000000
--- a/owl-base/Owl_types/module-type-Ndarray_Mutable/Scalar/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Scalar (owl-base.Owl_types.Ndarray_Mutable.Scalar)

Module Ndarray_Mutable.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
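The Scalar submodule mirrors the element-wise math functions at the elt level, which lets functor code compute on single elements without committing to elt = float. A self-contained sketch of such generic code; the functor name and the trimmed-down argument signature are hypothetical, and only members listed above are used:

(* hypothetical helper functor written against a slice of the Scalar signature *)
module Make_sigmoid_check (N : sig
  type elt
  module Scalar : sig
    val neg : elt -> elt
    val exp : elt -> elt
    val add : elt -> elt -> elt
    val div : elt -> elt -> elt
    val sigmoid : elt -> elt
  end
  val float_to_elt : float -> elt
end) = struct
  (* recompute sigmoid x = 1 / (1 + exp (-x)) from the primitive operations *)
  let sigmoid' x =
    let one = N.float_to_elt 1.0 in
    N.Scalar.(div one (add one (exp (neg x))))

  (* both definitions should agree up to rounding *)
  let pair x = N.Scalar.sigmoid x, sigmoid' x
end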
\ No newline at end of file
diff --git a/owl-base/Owl_types/module-type-Ndarray_Mutable/index.html b/owl-base/Owl_types/module-type-Ndarray_Mutable/index.html
deleted file mode 100644
index e7529fed9..000000000
--- a/owl-base/Owl_types/module-type-Ndarray_Mutable/index.html
+++ /dev/null
@@ -1,379 +0,0 @@
Ndarray_Mutable (owl-base.Owl_types.Ndarray_Mutable)

Module type Owl_types.Ndarray_Mutable

include Owl_types_ndarray_mutable.Sig
include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
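Relative to the algodiff signature, the Mutable signature adds the underscore-suffixed in-place variants: out is a required label for the initialisers (zeros_, one_hot_, ...) and an optional one for most math functions, where omitting it overwrites the first array argument. A brief sketch of buffer reuse, assuming Owl's Dense.Ndarray.D provides these functions (an assumption made for the example):

module N = Owl.Dense.Ndarray.D   (* assumed implementation of the signature *)

let () =
  let x = N.uniform [| 3; 3 |] in
  let y = N.ones [| 3; 3 |] in
  let buf = N.copy x in          (* one buffer, reused by every step *)
  N.add_ ~out:buf x y;           (* buf <- x + y *)
  N.relu_ ~out:buf buf;          (* buf <- relu buf *)
  N.sigmoid_ buf;                (* no ~out: buf is overwritten in place *)
  N.print buf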
\ No newline at end of file
diff --git a/owl-base/Owl_types/module-type-Ndarray_Numdiff/index.html b/owl-base/Owl_types/module-type-Ndarray_Numdiff/index.html
deleted file mode 100644
index af0786677..000000000
--- a/owl-base/Owl_types/module-type-Ndarray_Numdiff/index.html
+++ /dev/null
@@ -1,158 +0,0 @@
Ndarray_Numdiff (owl-base.Owl_types.Ndarray_Numdiff)

Module type Owl_types.Ndarray_Numdiff

include Owl_types_ndarray_numdiff.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val mapi : (int -> elt -> elt) -> arr -> arr
val (+) : arr -> arr -> arr
val (-) : arr -> arr -> arr
val (*) : arr -> arr -> arr
val (/) : arr -> arr -> arr
val (+$) : arr -> elt -> arr
val (-$) : arr -> elt -> arr
val (*$) : arr -> elt -> arr
val (/$) : arr -> elt -> arr
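Beyond the basic ndarray functions, Ndarray_Numdiff only asks for mapi and the arithmetic operators, which is enough to express finite-difference formulas tersely. Below is a hand-rolled central-difference gradient written against just this signature; it is an illustration, not Owl's own Numdiff code, and Owl's Dense.Ndarray.D is assumed as the implementation:

module N = Owl.Dense.Ndarray.D   (* assumed implementation of the signature *)

(* central-difference gradient of f : arr -> float at x *)
let grad ?(h = 1e-5) f x =
  N.init_nd (N.shape x) (fun idx ->
      let xp = N.copy x and xm = N.copy x in
      N.set xp idx (N.get x idx +. h);
      N.set xm idx (N.get x idx -. h);
      (f xp -. f xm) /. (2. *. h))

let () =
  let x = N.uniform [| 3 |] in
  let g = grad (fun v -> N.sum' N.(v * v)) x in
  N.print g;                     (* numerical gradient of x . x ... *)
  N.print N.(x *$ 2.)            (* ... should match 2x *)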
\ No newline at end of file
diff --git a/owl-base/Owl_types/module-type-Stats_Dist/Linalg/index.html b/owl-base/Owl_types/module-type-Stats_Dist/Linalg/index.html
deleted file mode 100644
index 76824ea65..000000000
--- a/owl-base/Owl_types/module-type-Stats_Dist/Linalg/index.html
+++ /dev/null
@@ -1,6 +0,0 @@
Linalg (owl-base.Owl_types.Stats_Dist.Linalg)

Module Stats_Dist.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file
diff --git a/owl-base/Owl_types/module-type-Stats_Dist/Mat/index.html b/owl-base/Owl_types/module-type-Stats_Dist/Mat/index.html
deleted file mode 100644
index 76620900c..000000000
--- a/owl-base/Owl_types/module-type-Stats_Dist/Mat/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Mat (owl-base.Owl_types.Stats_Dist.Mat)

Module Stats_Dist.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file
diff --git a/owl-base/Owl_types/module-type-Stats_Dist/Scalar/index.html b/owl-base/Owl_types/module-type-Stats_Dist/Scalar/index.html
deleted file mode 100644
index 27896bf1b..000000000
--- a/owl-base/Owl_types/module-type-Stats_Dist/Scalar/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Scalar (owl-base.Owl_types.Stats_Dist.Scalar)

Module Stats_Dist.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file
diff --git a/owl-base/Owl_types/module-type-Stats_Dist/index.html b/owl-base/Owl_types/module-type-Stats_Dist/index.html
deleted file mode 100644
index abf40b810..000000000
--- a/owl-base/Owl_types/module-type-Stats_Dist/index.html
+++ /dev/null
@@ -1,379 +0,0 @@
Stats_Dist (owl-base.Owl_types.Stats_Dist)

Module type Owl_types.Stats_Dist

include Owl_types_stats_dist.Sig
include Owl_types_ndarray_mutable.Sig
include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
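The trailing-underscore functions above are the destructive counterparts of the pure operations: they write their result into a caller-supplied buffer (labelled out, or ?out when it may be omitted) and return unit. The following is a minimal sketch of that convention; the functor argument is a local signature that copies only the two values used from the listing above, and the behaviour when ?out is omitted (overwriting the first operand) is an assumption rather than something stated in the listing.

module Inplace_demo
    (M : sig
       type arr
       type elt
       val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
       val elt_equal_ : ?out:arr -> arr -> arr -> unit
     end) =
struct
  (* Compare x against a scalar threshold, writing the 0/1 mask into [mask]
     instead of allocating a fresh array. *)
  let threshold_into ~mask x t = M.elt_greater_scalar_ ~out:mask x t

  (* With [?out] omitted the result is assumed to overwrite the first operand. *)
  let equal_in_place x y = M.elt_equal_ x y
end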
val uniform_rvs : a:arr -> b:arr -> n:int -> arr
val uniform_pdf : a:arr -> b:arr -> arr -> arr
val uniform_logpdf : a:arr -> b:arr -> arr -> arr
val uniform_cdf : a:arr -> b:arr -> arr -> arr
val uniform_logcdf : a:arr -> b:arr -> arr -> arr
val uniform_ppf : a:arr -> b:arr -> arr -> arr
val uniform_sf : a:arr -> b:arr -> arr -> arr
val uniform_logsf : a:arr -> b:arr -> arr -> arr
val uniform_isf : a:arr -> b:arr -> arr -> arr
val gaussian_rvs : mu:arr -> sigma:arr -> n:int -> arr
val gaussian_pdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logpdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_cdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logcdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_ppf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_sf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logsf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_isf : mu:arr -> sigma:arr -> arr -> arr
val exponential_rvs : lambda:arr -> n:int -> arr
val exponential_pdf : lambda:arr -> arr -> arr
val exponential_logpdf : lambda:arr -> arr -> arr
val exponential_cdf : lambda:arr -> arr -> arr
val exponential_logcdf : lambda:arr -> arr -> arr
val exponential_ppf : lambda:arr -> arr -> arr
val exponential_sf : lambda:arr -> arr -> arr
val exponential_logsf : lambda:arr -> arr -> arr
val exponential_isf : lambda:arr -> arr -> arr
val poisson_rvs : mu:arr -> n:int -> arr
val gamma_rvs : shape:arr -> scale:arr -> n:int -> arr
val gamma_pdf : shape:arr -> scale:arr -> arr -> arr
val gamma_logpdf : shape:arr -> scale:arr -> arr -> arr
val gamma_cdf : shape:arr -> scale:arr -> arr -> arr
val gamma_logcdf : shape:arr -> scale:arr -> arr -> arr
val gamma_ppf : shape:arr -> scale:arr -> arr -> arr
val gamma_sf : shape:arr -> scale:arr -> arr -> arr
val gamma_logsf : shape:arr -> scale:arr -> arr -> arr
val gamma_isf : shape:arr -> scale:arr -> arr -> arr
val beta_rvs : a:arr -> b:arr -> n:int -> arr
val beta_pdf : a:arr -> b:arr -> arr -> arr
val beta_logpdf : a:arr -> b:arr -> arr -> arr
val beta_cdf : a:arr -> b:arr -> arr -> arr
val beta_logcdf : a:arr -> b:arr -> arr -> arr
val beta_ppf : a:arr -> b:arr -> arr -> arr
val beta_sf : a:arr -> b:arr -> arr -> arr
val beta_logsf : a:arr -> b:arr -> arr -> arr
val beta_isf : a:arr -> b:arr -> arr -> arr
val chi2_rvs : df:arr -> n:int -> arr
val chi2_pdf : df:arr -> arr -> arr
val chi2_logpdf : df:arr -> arr -> arr
val chi2_cdf : df:arr -> arr -> arr
val chi2_logcdf : df:arr -> arr -> arr
val chi2_ppf : df:arr -> arr -> arr
val chi2_sf : df:arr -> arr -> arr
val chi2_logsf : df:arr -> arr -> arr
val chi2_isf : df:arr -> arr -> arr
val f_rvs : dfnum:arr -> dfden:arr -> n:int -> arr
val f_pdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logpdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_cdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logcdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_ppf : dfnum:arr -> dfden:arr -> arr -> arr
val f_sf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logsf : dfnum:arr -> dfden:arr -> arr -> arr
val f_isf : dfnum:arr -> dfden:arr -> arr -> arr
val cauchy_rvs : loc:arr -> scale:arr -> n:int -> arr
val cauchy_pdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logpdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_cdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logcdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_ppf : loc:arr -> scale:arr -> arr -> arr
val cauchy_sf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logsf : loc:arr -> scale:arr -> arr -> arr
val cauchy_isf : loc:arr -> scale:arr -> arr -> arr
val lomax_rvs : shape:arr -> scale:arr -> n:int -> arr
val lomax_pdf : shape:arr -> scale:arr -> arr -> arr
val lomax_logpdf : shape:arr -> scale:arr -> arr -> arr
val lomax_cdf : shape:arr -> scale:arr -> arr -> arr
val lomax_logcdf : shape:arr -> scale:arr -> arr -> arr
val lomax_ppf : shape:arr -> scale:arr -> arr -> arr
val lomax_sf : shape:arr -> scale:arr -> arr -> arr
val lomax_logsf : shape:arr -> scale:arr -> arr -> arr
val lomax_isf : shape:arr -> scale:arr -> arr -> arr
val weibull_rvs : shape:arr -> scale:arr -> n:int -> arr
val weibull_pdf : shape:arr -> scale:arr -> arr -> arr
val weibull_logpdf : shape:arr -> scale:arr -> arr -> arr
val weibull_cdf : shape:arr -> scale:arr -> arr -> arr
val weibull_logcdf : shape:arr -> scale:arr -> arr -> arr
val weibull_ppf : shape:arr -> scale:arr -> arr -> arr
val weibull_sf : shape:arr -> scale:arr -> arr -> arr
val weibull_logsf : shape:arr -> scale:arr -> arr -> arr
val weibull_isf : shape:arr -> scale:arr -> arr -> arr
val laplace_rvs : loc:arr -> scale:arr -> n:int -> arr
val laplace_pdf : loc:arr -> scale:arr -> arr -> arr
val laplace_logpdf : loc:arr -> scale:arr -> arr -> arr
val laplace_cdf : loc:arr -> scale:arr -> arr -> arr
val laplace_logcdf : loc:arr -> scale:arr -> arr -> arr
val laplace_ppf : loc:arr -> scale:arr -> arr -> arr
val laplace_sf : loc:arr -> scale:arr -> arr -> arr
val laplace_logsf : loc:arr -> scale:arr -> arr -> arr
val laplace_isf : loc:arr -> scale:arr -> arr -> arr
val gumbel1_rvs : a:arr -> b:arr -> n:int -> arr
val gumbel1_pdf : a:arr -> b:arr -> arr -> arr
val gumbel1_logpdf : a:arr -> b:arr -> arr -> arr
val gumbel1_cdf : a:arr -> b:arr -> arr -> arr
val gumbel1_logcdf : a:arr -> b:arr -> arr -> arr
val gumbel1_ppf : a:arr -> b:arr -> arr -> arr
val gumbel1_sf : a:arr -> b:arr -> arr -> arr
val gumbel1_logsf : a:arr -> b:arr -> arr -> arr
val gumbel1_isf : a:arr -> b:arr -> arr -> arr
val gumbel2_rvs : a:arr -> b:arr -> n:int -> arr
val gumbel2_pdf : a:arr -> b:arr -> arr -> arr
val gumbel2_logpdf : a:arr -> b:arr -> arr -> arr
val gumbel2_cdf : a:arr -> b:arr -> arr -> arr
val gumbel2_logcdf : a:arr -> b:arr -> arr -> arr
val gumbel2_ppf : a:arr -> b:arr -> arr -> arr
val gumbel2_sf : a:arr -> b:arr -> arr -> arr
val gumbel2_logsf : a:arr -> b:arr -> arr -> arr
val gumbel2_isf : a:arr -> b:arr -> arr -> arr
val logistic_rvs : loc:arr -> scale:arr -> n:int -> arr
val logistic_pdf : loc:arr -> scale:arr -> arr -> arr
val logistic_logpdf : loc:arr -> scale:arr -> arr -> arr
val logistic_cdf : loc:arr -> scale:arr -> arr -> arr
val logistic_logcdf : loc:arr -> scale:arr -> arr -> arr
val logistic_ppf : loc:arr -> scale:arr -> arr -> arr
val logistic_sf : loc:arr -> scale:arr -> arr -> arr
val logistic_logsf : loc:arr -> scale:arr -> arr -> arr
val logistic_isf : loc:arr -> scale:arr -> arr -> arr
val lognormal_rvs : mu:arr -> sigma:arr -> n:int -> arr
val lognormal_pdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logpdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_cdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logcdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_ppf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_sf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logsf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_isf : mu:arr -> sigma:arr -> arr -> arr
val rayleigh_rvs : sigma:arr -> n:int -> arr
val rayleigh_pdf : sigma:arr -> arr -> arr
val rayleigh_logpdf : sigma:arr -> arr -> arr
val rayleigh_cdf : sigma:arr -> arr -> arr
val rayleigh_logcdf : sigma:arr -> arr -> arr
val rayleigh_ppf : sigma:arr -> arr -> arr
val rayleigh_sf : sigma:arr -> arr -> arr
val rayleigh_logsf : sigma:arr -> arr -> arr
val rayleigh_isf : sigma:arr -> arr -> arr
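The distribution functions above follow one naming scheme per family: rvs draws samples, pdf/logpdf evaluate the density, cdf/logcdf and sf/logsf the (log) cumulative and survival functions, and ppf/isf their inverses. A minimal hedged sketch of how the Gaussian family might be used, as a functor over a local signature that copies only the values needed from the listing above:

module Gaussian_demo
    (M : sig
       type arr
       val gaussian_rvs : mu:arr -> sigma:arr -> n:int -> arr
       val gaussian_pdf : mu:arr -> sigma:arr -> arr -> arr
       val gaussian_logpdf : mu:arr -> sigma:arr -> arr -> arr
     end) =
struct
  (* Draw n samples from N(mu, sigma) and score them under the same
     parameters; mu and sigma are arrays, so the parameters may vary
     element-wise. *)
  let sample_and_score ~mu ~sigma ~n =
    let xs = M.gaussian_rvs ~mu ~sigma ~n in
    xs, M.gaussian_pdf ~mu ~sigma xs, M.gaussian_logpdf ~mu ~sigma xs
end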
Linalg (owl-base.Owl_types_computation_device.Sig.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
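A minimal sketch of how this linear-algebra interface might be used; the functor argument is a local signature copying only the three values needed from the listing above, and the interpretation of the ?upper flag follows the usual Cholesky convention rather than anything stated here.

module Linalg_demo
    (L : sig
       type arr
       type elt
       val chol : ?upper:bool -> arr -> arr
       val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
       val logdet : arr -> elt
     end) =
struct
  (* Solve a x = b for x and report log|det a|. *)
  let solve_and_logdet a b = L.linsolve a b, L.logdet a

  (* Cholesky factor of a symmetric positive-definite matrix; the ?upper
     flag selects which triangle is returned. *)
  let chol_upper a = L.chol ~upper:true a
end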
Mat (owl-base.Owl_types_computation_device.Sig.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
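A small hedged sketch of these matrix helpers; the functor argument copies only the values used from the listing above, and the assumption that k offsets the diagonal in the usual NumPy-style way is not stated in the listing itself.

module Mat_demo
    (Mat : sig
       type arr
       val eye : int -> arr
       val triu : ?k:int -> arr -> arr
       val tril : ?k:int -> arr -> arr
     end) =
struct
  (* n x n identity, and the strictly upper / strictly lower parts of a. *)
  let identity n = Mat.eye n
  let strict_upper a = Mat.triu ~k:1 a
  let strict_lower a = Mat.tril ~k:(-1) a
end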
Scalar (owl-base.Owl_types_computation_device.Sig.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
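These scalar primitives operate on the abstract elt type rather than on arrays, so derived quantities can be composed without leaving the interface. A minimal sketch, as a functor over a local signature that copies only the values used from the listing above:

module Scalar_demo
    (S : sig
       type elt
       val add : elt -> elt -> elt
       val sqr : elt -> elt
       val sqrt : elt -> elt
       val atan2 : elt -> elt -> elt
     end) =
struct
  (* Euclidean norm and angle of the 2-D vector (x, y), built only from
     the scalar primitives above. *)
  let norm2 x y = S.sqrt (S.add (S.sqr x) (S.sqr y))
  let angle x y = S.atan2 y x
end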
A (owl-base.Owl_types_computation_device.Sig.A)

Module Sig.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
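init builds an array from a flat-index function, while init_nd passes the full multi-dimensional index to the callback. A hedged sketch of the latter; the functor argument copies only the values used, including float_to_elt, which is declared further down in the same signature.

module Init_demo
    (M : sig
       type arr
       type elt
       val float_to_elt : float -> elt
       val init_nd : int array -> (int array -> elt) -> arr
     end) =
struct
  (* Build an n x n identity matrix element by element; the callback
     receives the full index of every cell. *)
  let identity n =
    M.init_nd [| n; n |] (fun idx ->
        if idx.(0) = idx.(1) then M.float_to_elt 1. else M.float_to_elt 0.)
end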
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
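A hedged sketch of dot_, assuming BLAS gemm-like semantics (c receives alpha * op(a) * op(b) + beta * c); the ?alpha/?beta/c parameters suggest this, but the listing does not spell it out, so treat the interpretation as an assumption. The functor argument copies only the values used from this signature.

module Gemm_demo
    (M : sig
       type arr
       type elt
       val float_to_elt : float -> elt
       val dot_ :
         ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt ->
         c:arr -> arr -> arr -> unit
     end) =
struct
  (* Accumulate a matrix product into a preallocated buffer c,
     assumed semantics: c <- alpha * a^T * b + beta * c. *)
  let accumulate ~c a b =
    M.dot_ ~transa:true ~alpha:(M.float_to_elt 1.) ~beta:(M.float_to_elt 1.) ~c a b
end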
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
Sig (owl-base.Owl_types_computation_device.Sig)

Module type Owl_types_computation_device.Sig

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO
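A minimal hedged sketch of how these conversion functions fit together; the functor argument is a local signature mirroring only the values used from the listing above, so it stands in for any module satisfying this Sig.

module Device_demo
    (D : sig
       type device
       type value
       module A : sig
         type arr
       end
       val make_device : unit -> device
       val arr_to_value : A.arr -> value
       val value_to_arr : value -> A.arr
       val is_arr : value -> bool
     end) =
struct
  (* Wrap an ndarray into the engine's value type and read it back;
     is_arr distinguishes array values from scalar values. *)
  let round_trip (x : D.A.arr) =
    let _device = D.make_device () in
    let v = D.arr_to_value x in
    assert (D.is_arr v);
    D.value_to_arr v
end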

Linalg (owl-base.Owl_types_computation_engine.Sig.Graph.Optimiser.Operator.Linalg)

Module Operator.Linalg

Mat (owl-base.Owl_types_computation_engine.Sig.Graph.Optimiser.Operator.Mat)

Module Operator.Mat

val eye : int -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

Scalar (owl-base.Owl_types_computation_engine.Sig.Graph.Optimiser.Operator.Scalar)

Module Operator.Scalar

Linalg (owl-base.Owl_types_computation_engine.Sig.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
Mat (owl-base.Owl_types_computation_engine.Sig.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
Scalar (owl-base.Owl_types_computation_engine.Sig.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
A (owl-base.Owl_types_computation_engine.Sig.Graph.Optimiser.Operator.Symbol.Shape.Type.Device.A)

Module Device.A

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
Device (owl-base.Owl_types_computation_engine.Sig.Graph.Optimiser.Operator.Symbol.Shape.Type.Device)

Module Type.Device

Type definition
type device

TODO

type value

TODO

Core functions
val make_device : unit -> device

TODO

val arr_to_value : A.arr -> value

TODO

val value_to_arr : value -> A.arr

TODO

val elt_to_value : A.elt -> value

TODO

val value_to_elt : value -> A.elt

TODO

val value_to_float : value -> float

TODO

val is_arr : value -> bool

TODO

val is_elt : value -> bool

TODO

Type (owl-base.Owl_types_computation_engine.Sig.Graph.Optimiser.Operator.Symbol.Shape.Type)

Module Shape.Type

Type definition
type state =
  1. | Valid
  2. | Invalid
    (*

    TODO

    *)

TODO

and block = {
  1. size : int;
  2. block_id : int;
  3. mutable active : t option;
  4. mutable memory : Device.value;
  5. mutable nodes : t list;
}

The block type keeps a reference to a block of memory and to the nodes sharing that block.

and attr = {
  1. mutable op : op;
  2. mutable freeze : bool;
  3. mutable reuse : bool;
  4. mutable state : state;
  5. mutable shape : int array option array;
  6. mutable value : Device.value array;
  7. mutable block : block array option;
}

TODO

and arr =
  1. | Arr of t
and elt =
  1. | Elt of t
and op =
  1. | Noop
  2. | Var
  3. | Const
  4. | Empty of int array
  5. | Zeros of int array
  6. | Ones of int array
  7. | Create of int array
  8. | Sequential of int array
  9. | Uniform of int array
  10. | Gaussian of int array
  11. | Bernoulli of int array
  12. | Init of int array * (int -> elt)
  13. | Get of int array
  14. | Set of int array
  15. | GetSlice of int list list
  16. | SetSlice of int list list
  17. | GetFancy of Owl_types_common.index list
  18. | SetFancy of Owl_types_common.index list
  19. | Copy
  20. | Reset
  21. | Reshape of int array
  22. | Reverse
  23. | Tile of int array
  24. | Repeat of int array
  25. | Pad of elt * int list list
  26. | Concatenate of int
  27. | Stack of int
  28. | Split of int * int array
  29. | Draw of int * int
  30. | Map of (elt -> elt)
  31. | Fold of int * (elt -> elt -> elt)
  32. | Scan of int * (elt -> elt -> elt)
  33. | OneHot of int
  34. | OfArray of int array
  35. | Delay of (Device.A.arr -> Device.A.arr)
  36. | DelayArray of int array * (Device.A.arr array -> Device.A.arr)
  37. | LazyPrint of int option * int option * bool option * (Device.A.elt -> string) option
  38. | Abs
  39. | Neg
  40. | Floor
  41. | Ceil
  42. | Round
  43. | Sqr
  44. | Sqrt
  45. | Log
  46. | Log2
  47. | Log10
  48. | Exp
  49. | Sin
  50. | Cos
  51. | Tan
  52. | Sinh
  53. | Cosh
  54. | Tanh
  55. | Asin
  56. | Acos
  57. | Atan
  58. | Asinh
  59. | Acosh
  60. | Atanh
  61. | Min of bool * int
  62. | Max of bool * int
  63. | Sum of bool * int
  64. | SumReduce of int array
  65. | Signum
  66. | Sigmoid
  67. | Relu
  68. | Dawsn
  69. | Min'
  70. | Max'
  71. | Sum'
  72. | LogSumExp'
  73. | LogSumExp of bool * int
  74. | L1norm'
  75. | L2norm'
  76. | L2NormSqr'
  77. | ClipByValue
  78. | ClipByL2norm
  79. | Pow
  80. | ScalarPow
  81. | PowScalar
  82. | Atan2
  83. | ScalarAtan2
  84. | Atan2Scalar
  85. | Hypot
  86. | Min2
  87. | Max2
  88. | Add
  89. | Sub
  90. | Mul
  91. | Div
  92. | AddScalar
  93. | SubScalar
  94. | MulScalar
  95. | DivScalar
  96. | ScalarAdd
  97. | ScalarSub
  98. | ScalarMul
  99. | ScalarDiv
  100. | FMA
  101. | EltEqual
  102. | EltNotEqual
  103. | EltLess
  104. | EltGreater
  105. | EltLessEqual
  106. | EltGreaterEqual
  107. | EltEqualScalar
  108. | EltNotEqualScalar
  109. | EltLessScalar
  110. | EltGreaterScalar
  111. | EltLessEqualScalar
  112. | EltGreaterEqualScalar
  113. | Conv1d of Owl_types_common.padding * int array
  114. | Conv2d of Owl_types_common.padding * int array
  115. | Conv3d of Owl_types_common.padding * int array
  116. | TransposeConv1d of Owl_types_common.padding * int array
  117. | TransposeConv2d of Owl_types_common.padding * int array
  118. | TransposeConv3d of Owl_types_common.padding * int array
  119. | DilatedConv1d of Owl_types_common.padding * int array * int array
  120. | DilatedConv2d of Owl_types_common.padding * int array * int array
  121. | DilatedConv3d of Owl_types_common.padding * int array * int array
  122. | MaxPool1d of Owl_types_common.padding * int array * int array
  123. | MaxPool2d of Owl_types_common.padding * int array * int array
  124. | MaxPool3d of Owl_types_common.padding * int array * int array
  125. | AvgPool1d of Owl_types_common.padding * int array * int array
  126. | AvgPool2d of Owl_types_common.padding * int array * int array
  127. | AvgPool3d of Owl_types_common.padding * int array * int array
  128. | UpSampling2d of int array
  129. | Conv1dBackwardInput of int array
  130. | Conv1dBackwardKernel of int array
  131. | Conv2dBackwardInput of int array
  132. | Conv2dBackwardKernel of int array
  133. | Conv3dBackwardInput of int array
  134. | Conv3dBackwardKernel of int array
  135. | TransposeConv1dBackwardInput of int array
  136. | TransposeConv1dBackwardKernel of int array
  137. | TransposeConv2dBackwardInput of int array
  138. | TransposeConv2dBackwardKernel of int array
  139. | TransposeConv3dBackwardInput of int array
  140. | TransposeConv3dBackwardKernel of int array
  141. | DilatedConv1dBackwardInput of int array * int array
  142. | DilatedConv1dBackwardKernel of int array * int array
  143. | DilatedConv2dBackwardInput of int array * int array
  144. | DilatedConv2dBackwardKernel of int array * int array
  145. | DilatedConv3dBackwardInput of int array * int array
  146. | DilatedConv3dBackwardKernel of int array * int array
  147. | MaxPool1dBackward of Owl_types_common.padding * int array * int array
  148. | MaxPool2dBackward of Owl_types_common.padding * int array * int array
  149. | MaxPool3dBackward of Owl_types_common.padding * int array * int array
  150. | AvgPool1dBackward of Owl_types_common.padding * int array * int array
  151. | AvgPool2dBackward of Owl_types_common.padding * int array * int array
  152. | AvgPool3dBackward of Owl_types_common.padding * int array * int array
  153. | UpSampling2dBackward of int array
  154. | RowNum
  155. | ColNum
  156. | Row
  157. | Rows of int array
  158. | CopyRowTo
  159. | CopyColTo
  160. | Dot of bool * bool * elt * elt
  161. | Inv
  162. | Trace
  163. | Transpose of int array
  164. | ToRows
  165. | OfRows
  166. | Scalar_Add
  167. | Scalar_Sub
  168. | Scalar_Mul
  169. | Scalar_Div
  170. | Scalar_Pow
  171. | Scalar_Atan2
  172. | Scalar_Abs
  173. | Scalar_Neg
  174. | Scalar_Sqr
  175. | Scalar_Sqrt
  176. | Scalar_Exp
  177. | Scalar_Log
  178. | Scalar_Log2
  179. | Scalar_Log10
  180. | Scalar_Signum
  181. | Scalar_Floor
  182. | Scalar_Ceil
  183. | Scalar_Round
  184. | Scalar_Sin
  185. | Scalar_Cos
  186. | Scalar_Tan
  187. | Scalar_Sinh
  188. | Scalar_Cosh
  189. | Scalar_Tanh
  190. | Scalar_Asin
  191. | Scalar_Acos
  192. | Scalar_Atan
  193. | Scalar_Asinh
  194. | Scalar_Acosh
  195. | Scalar_Atanh
  196. | Scalar_Relu
  197. | Scalar_Dawsn
  198. | Scalar_Sigmoid
  199. | Fused_Adagrad of float * float
    (*

    TODO

    *)
\ No newline at end of file diff --git a/owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/Symbol/Shape/index.html b/owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/Symbol/Shape/index.html deleted file mode 100644 index d00339b98..000000000 --- a/owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/Symbol/Shape/index.html +++ /dev/null @@ -1,5 +0,0 @@ - -Shape (owl-base.Owl_types_computation_engine.Sig.Graph.Optimiser.Operator.Symbol.Shape)

Module Symbol.Shape

Core functions
val infer_shape : Type.op -> Type.attr Owl_graph.node array -> int array option array

TODO

\ No newline at end of file diff --git a/owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/Symbol/index.html b/owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/Symbol/index.html deleted file mode 100644 index 9609270bf..000000000 --- a/owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/Symbol/index.html +++ /dev/null @@ -1,28 +0,0 @@ - -Symbol (owl-base.Owl_types_computation_engine.Sig.Graph.Optimiser.Operator.Symbol)

Module Operator.Symbol

Core functions
val op_to_str : Shape.Type.op -> string

TODO

val is_random_variable : Shape.Type.op -> bool

TODO

val refnum : 'a Owl_graph.node -> int

TODO

val node_shape : Shape.Type.attr Owl_graph.node -> int array

TODO

val node_numel : Shape.Type.attr Owl_graph.node -> int

TODO

val is_shape_unknown : Shape.Type.attr Owl_graph.node -> bool

TODO

val infer_shape_graph : Shape.Type.attr Owl_graph.node array -> unit

TODO

val shape_to_str : int array option array -> string

TODO

val node_to_str : Shape.Type.attr Owl_graph.node -> string

TODO

val node_to_arr : Shape.Type.t -> Shape.Type.arr

TODO

val arr_to_node : Shape.Type.arr -> Shape.Type.t

TODO

val node_to_elt : Shape.Type.t -> Shape.Type.elt

TODO

val elt_to_node : Shape.Type.elt -> Shape.Type.t

TODO

val make_node : ?name:string -> ?value:Shape.Type.Device.value array -> ?shape:int array option array -> ?freeze:bool -> ?reuse:bool -> ?state:Shape.Type.state -> Shape.Type.op -> Shape.Type.attr Owl_graph.node

TODO

val make_then_connect : ?shape:int array option array -> Shape.Type.op -> Shape.Type.attr Owl_graph.node array -> Shape.Type.attr Owl_graph.node

TODO

val var_arr : ?shape:int array -> string -> Shape.Type.arr

TODO

val var_elt : string -> Shape.Type.elt

TODO

val const_arr : string -> Shape.Type.Device.A.arr -> Shape.Type.arr

TODO

val const_elt : string -> Shape.Type.Device.A.elt -> Shape.Type.elt

TODO

val new_block_id : unit -> int

new_block_id () returns an unused block id.

val make_empty_block : ?block_id:int -> int -> Shape.Type.block

make_empty_block s returns an empty block of memory of size s.

val make_value_block : Shape.Type.Device.value -> Shape.Type.attr Owl_graph.node -> unit

make_value_block value node creates a block of memory initialised with value and links the new block to node.

get_block node returns the memory block allocated to node. If no block is allocated, throws an exception.

val add_node_to_block : Shape.Type.attr Owl_graph.node -> Shape.Type.block -> unit

Links a node to a reusable block and initialises the node's memory on the memory of that block.

val get_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node option

Return the node that is currently using the memory of the block.

val set_active_node : Shape.Type.block -> Shape.Type.attr Owl_graph.node -> unit

Update the node that is currently using the block of memory.

val get_block_id : Shape.Type.attr Owl_graph.node -> int

get_block_id node returns the id of the block assigned to node. If node has not been assigned yet, returns -1.
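Taken together with new_block_id, make_empty_block and add_node_to_block above, these functions describe the allocator's bookkeeping. The lines below are an illustrative sketch only: S is an assumed alias for an instantiated Symbol module and n an existing node, neither of which is defined by this signature.

  (* Hypothetical usage sketch; S and n are assumptions, not part of this signature. *)
  let share_fresh_block (n : S.Shape.Type.attr Owl_graph.node) size =
    let b = S.make_empty_block ~block_id:(S.new_block_id ()) size in
    S.add_node_to_block n b;                    (* n is now backed by b's memory *)
    assert (S.is_assigned n);                   (* a block has been recorded for n *)
    Printf.printf "node uses block %d\n" (S.get_block_id n)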

val set_value : Shape.Type.attr Owl_graph.node -> Shape.Type.Device.value array -> unit

TODO

val set_operator : Shape.Type.attr Owl_graph.node -> Shape.Type.op -> unit

TODO

TODO

val set_reuse : Shape.Type.attr Owl_graph.node -> bool -> unit

TODO

val get_reuse : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_shared : Shape.Type.attr Owl_graph.node -> bool

TODO

val get_shared_nodes : Shape.Type.attr Owl_graph.node -> Shape.Type.attr Owl_graph.node array

get_shared_nodes node returns the nodes sharing the same block of memory as node.
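For example (a sketch under the same assumed S binding as above), the nodes aliasing a given node's memory can be listed with node_to_str:

  (* Hypothetical: print every node that shares n's memory block. *)
  let print_aliases n =
    S.get_shared_nodes n |> Array.iter (fun m -> print_endline (S.node_to_str m))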

val is_var : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_const : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_arr : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_node_elt : Shape.Type.attr Owl_graph.node -> bool

TODO

val is_assigned : Shape.Type.attr Owl_graph.node -> bool

is_assigned node checks if a block of memory has been assigned to node.

val check_assigned : Shape.Type.attr Owl_graph.node -> unit

check_assigned node throws an exception if node has not been assigned to a block.
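A guarded access pattern, again sketched under the assumed S binding, combines the two checks:

  (* Hypothetical: raise early via check_assigned, otherwise report the block id. *)
  let block_id_exn n =
    S.check_assigned n;    (* raises if no block has been assigned to n *)
    S.get_block_id n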

val is_valid : Shape.Type.attr Owl_graph.node -> bool

TODO

val validate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate : Shape.Type.attr Owl_graph.node -> unit

TODO

val invalidate_graph : Shape.Type.attr Owl_graph.node -> unit

TODO

val is_freeze : Shape.Type.attr Owl_graph.node -> bool

TODO

val freeze : Shape.Type.attr Owl_graph.node -> unit

TODO

val freeze_descendants : Shape.Type.attr Owl_graph.node array -> unit

TODO

val freeze_ancestors : Shape.Type.attr Owl_graph.node array -> unit

TODO

TODO

TODO

val unsafe_assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_arr : Shape.Type.arr -> Shape.Type.Device.A.arr -> unit

TODO

val assign_elt : Shape.Type.elt -> Shape.Type.Device.A.elt -> unit

TODO

val float_to_elt : float -> Shape.Type.elt

TODO

val elt_to_float : Shape.Type.elt -> float

TODO

\ No newline at end of file diff --git a/owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/index.html b/owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/index.html deleted file mode 100644 index 1044c2a1d..000000000 --- a/owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/Operator/index.html +++ /dev/null @@ -1,420 +0,0 @@ - -Operator (owl-base.Owl_types_computation_engine.Sig.Graph.Optimiser.Operator)

Module Optimiser.Operator

Vectorised functions
val empty : int array -> Symbol.Shape.Type.arr

TODO

val zeros : int array -> Symbol.Shape.Type.arr

TODO

val ones : int array -> Symbol.Shape.Type.arr

TODO

val create : int array -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val sequential : ?a:Symbol.Shape.Type.elt -> ?step:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val uniform : ?a:Symbol.Shape.Type.elt -> ?b:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val gaussian : ?mu:Symbol.Shape.Type.elt -> ?sigma:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val bernoulli : ?p:Symbol.Shape.Type.elt -> int array -> Symbol.Shape.Type.arr

TODO

val init : int array -> (int -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val init_nd : int array -> (int array -> Symbol.Shape.Type.elt) -> Symbol.Shape.Type.arr

TODO

val shape : Symbol.Shape.Type.arr -> int array

TODO

val numel : Symbol.Shape.Type.arr -> int

TODO

TODO

val set : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.elt -> unit

TODO

val get_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val set_slice : int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

TODO

val set_fancy : Owl_types.index list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> unit

TODO

val copy_ : out:'a -> 'b -> 'c

TODO

val reset : Symbol.Shape.Type.arr -> unit

TODO

val reshape : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val tile : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val repeat : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val pad : ?v:Symbol.Shape.Type.elt -> int list list -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val expand : ?hi:bool -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr

TODO

val squeeze : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val concatenate : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

val stack : ?axis:int -> Symbol.Shape.Type.arr array -> Symbol.Shape.Type.arr

TODO

TODO

val split : ?axis:int -> 'a -> 'b -> 'c

TODO

val draw : ?axis:int -> Symbol.Shape.Type.arr -> int -> Symbol.Shape.Type.arr * 'a array

TODO

TODO

delay f x returns f x. It makes it possible to use a function that is not tracked by the computation graph and to delay its evaluation. The output must have the same shape as the input.

delay_array out_shape f x works in the same way as delay but is applied to an array of ndarrays. It needs the shape of the output as an argument.
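For example, and only as a sketch since the full signatures of delay and delay_array are not reproduced here, delay lets an untracked ndarray function act on a lazy array (Op and A are assumed aliases for this Operator module and its underlying ndarray module):

  (* Hypothetical: wrap an untracked ndarray function into the graph via delay. *)
  let doubled x = Op.delay (fun a -> A.mul_scalar a (A.float_to_elt 2.)) x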

val lazy_print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(Symbol.Shape.Type.Device.A.elt -> string) -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

lazy_print x prints the output of x when it is evaluated. It is implemented as an identity node. For information about the optional parameters, refer to the print function of the Ndarray module.
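A typical use, again only as a sketch with the assumed Op alias, is to tap a value in the middle of a lazy pipeline; the printing node passes its input through unchanged:

  (* Hypothetical: print the reduced value when the graph is evaluated. *)
  let tapped x = Op.lazy_print ~header:true (Op.sum ~axis:0 x)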

val print : ?max_row:'a -> ?max_col:'b -> ?header:'c -> ?fmt:'d -> 'e -> unit

TODO

val min : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val sum_reduce : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

TODO

TODO

TODO

TODO

val elt_less_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val elt_greater_equal_scalar : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt -> Symbol.Shape.Type.arr

TODO

val conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val max_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool1d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool2d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val avg_pool3d : ?padding:Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr

TODO

val upsampling2d : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val transpose_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv1d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv2d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_input : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val dilated_conv3d_backward_kernel : Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val max_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool1d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool2d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val avg_pool3d_backward : Owl_types.padding -> Symbol.Shape.Type.arr -> int array -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val upsampling2d_backward : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val row_num : Symbol.Shape.Type.arr -> int

TODO

val col_num : Symbol.Shape.Type.arr -> int

TODO

val rows : Symbol.Shape.Type.arr -> int array -> Symbol.Shape.Type.arr

TODO

val copy_row_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

val copy_col_to : Symbol.Shape.Type.arr -> 'a -> 'b -> unit

TODO

TODO

val transpose : ?axis:int array -> Symbol.Shape.Type.arr -> Symbol.Shape.Type.arr

TODO

val to_rows : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val to_cols : Symbol.Shape.Type.arr -> 'a array

TODO

TODO

val of_array : Symbol.Shape.Type.elt array -> int array -> Symbol.Shape.Type.arr

TODO

val of_arrays : Symbol.Shape.Type.elt array array -> Symbol.Shape.Type.arr

TODO

val to_arrays : Symbol.Shape.Type.arr -> Symbol.Shape.Type.elt array array

TODO

Scalar functions
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/index.html b/owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/index.html deleted file mode 100644 index 6b427ca07..000000000 --- a/owl-base/Owl_types_computation_engine/module-type-Sig/Graph/Optimiser/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Optimiser (owl-base.Owl_types_computation_engine.Sig.Graph.Optimiser)

Module Graph.Optimiser

Core functions
val estimate_complexity : 'a Owl_graph.node array -> int * int

TODO

val optimise_nodes : Operator.Symbol.Shape.Type.attr Owl_graph.node array -> unit

TODO

\ No newline at end of file diff --git a/owl-base/Owl_types_computation_engine/module-type-Sig/Graph/index.html b/owl-base/Owl_types_computation_engine/module-type-Sig/Graph/index.html deleted file mode 100644 index 6689edad0..000000000 --- a/owl-base/Owl_types_computation_engine/module-type-Sig/Graph/index.html +++ /dev/null @@ -1,33 +0,0 @@ - -Graph (owl-base.Owl_types_computation_engine.Sig.Graph)

Module Sig.Graph

Type definition
type graph

TODO

Core functions
val shape_or_value : Optimiser.Operator.Symbol.Shape.Type.t -> string

TODO

val graph_to_dot : graph -> string

TODO

val graph_to_trace : graph -> string

TODO

val save_graph : 'a -> string -> unit

TODO

val load_graph : string -> 'a * 'b

TODO

val invalidate_rvs : graph -> unit

TODO

val is_iopair_safe : 'a Owl_graph.node -> 'a Owl_graph.node -> bool

TODO

val update_iopair : graph -> unit

TODO

val remove_unused_iopair : 'a Owl_graph.node array -> 'b array -> 'a Owl_graph.node array * 'b array

TODO

val optimise : graph -> unit

TODO

\ No newline at end of file diff --git a/owl-base/Owl_types_computation_engine/module-type-Sig/index.html b/owl-base/Owl_types_computation_engine/module-type-Sig/index.html deleted file mode 100644 index 19093e65d..000000000 --- a/owl-base/Owl_types_computation_engine/module-type-Sig/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Sig (owl-base.Owl_types_computation_engine.Sig)

Module type Owl_types_computation_engine.Sig

Core evaluation functions of the engine

TODO

TODO

val eval_graph : Graph.graph -> unit

TODO
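As a sketch only (E is an assumed module matching this signature and g an already-built graph), a typical run optimises the graph before evaluating it:

  (* Hypothetical end-to-end evaluation of a computation graph. *)
  let run (g : E.Graph.graph) =
    E.Graph.optimise g;    (* fuse and prune nodes before evaluation *)
    E.eval_graph g         (* force evaluation of the whole graph *)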

\ No newline at end of file diff --git a/owl-base/Owl_types_maths_basic/.dummy b/owl-base/Owl_types_maths_basic/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_types_maths_basic/module-type-Sig/index.html b/owl-base/Owl_types_maths_basic/module-type-Sig/index.html deleted file mode 100644 index 90ddd5cff..000000000 --- a/owl-base/Owl_types_maths_basic/module-type-Sig/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Sig (owl-base.Owl_types_maths_basic.Sig)

Module type Owl_types_maths_basic.Sig

type elt
val add : elt -> elt -> elt
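Because the signature is so small, a complete implementation fits in a few lines. The module below is a sketch for illustration only, not part of Owl; it satisfies the signature with elt = float:

  (* Hypothetical implementation of Owl_types_maths_basic.Sig over plain floats. *)
  module Float_basic : Owl_types_maths_basic.Sig with type elt = float = struct
    type elt = float
    let add = ( +. )
  end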
\ No newline at end of file diff --git a/owl-base/Owl_types_ndarray_algodiff/.dummy b/owl-base/Owl_types_ndarray_algodiff/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_types_ndarray_algodiff/module-type-Sig/Linalg/index.html b/owl-base/Owl_types_ndarray_algodiff/module-type-Sig/Linalg/index.html deleted file mode 100644 index 06e921cd2..000000000 --- a/owl-base/Owl_types_ndarray_algodiff/module-type-Sig/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl-base.Owl_types_ndarray_algodiff.Sig.Linalg)

Module Sig.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_types_ndarray_algodiff/module-type-Sig/Mat/index.html b/owl-base/Owl_types_ndarray_algodiff/module-type-Sig/Mat/index.html deleted file mode 100644 index 70e9acdfd..000000000 --- a/owl-base/Owl_types_ndarray_algodiff/module-type-Sig/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl-base.Owl_types_ndarray_algodiff.Sig.Mat)

Module Sig.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl-base/Owl_types_ndarray_algodiff/module-type-Sig/Scalar/index.html b/owl-base/Owl_types_ndarray_algodiff/module-type-Sig/Scalar/index.html deleted file mode 100644 index cb2331eed..000000000 --- a/owl-base/Owl_types_ndarray_algodiff/module-type-Sig/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl-base.Owl_types_ndarray_algodiff.Sig.Scalar)

Module Sig.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl-base/Owl_types_ndarray_algodiff/module-type-Sig/index.html b/owl-base/Owl_types_ndarray_algodiff/module-type-Sig/index.html deleted file mode 100644 index 9ec8470b9..000000000 --- a/owl-base/Owl_types_ndarray_algodiff/module-type-Sig/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -Sig (owl-base.Owl_types_ndarray_algodiff.Sig)

Module type Owl_types_ndarray_algodiff.Sig

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl-base/Owl_types_ndarray_basic/.dummy b/owl-base/Owl_types_ndarray_basic/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_types_ndarray_basic/module-type-Sig/index.html b/owl-base/Owl_types_ndarray_basic/module-type-Sig/index.html deleted file mode 100644 index cf83db0d2..000000000 --- a/owl-base/Owl_types_ndarray_basic/module-type-Sig/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -Sig (owl-base.Owl_types_ndarray_basic.Sig)

Module type Owl_types_ndarray_basic.Sig

type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
\ No newline at end of file diff --git a/owl-base/Owl_types_ndarray_compare/.dummy b/owl-base/Owl_types_ndarray_compare/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_types_ndarray_compare/module-type-Sig/index.html b/owl-base/Owl_types_ndarray_compare/module-type-Sig/index.html deleted file mode 100644 index 3a71d3862..000000000 --- a/owl-base/Owl_types_ndarray_compare/module-type-Sig/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -Sig (owl-base.Owl_types_ndarray_compare.Sig)

Module type Owl_types_ndarray_compare.Sig

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
val is_zero : arr -> bool
val is_positive : arr -> bool
val is_negative : arr -> bool
val is_nonpositive : arr -> bool
val is_nonnegative : arr -> bool
val equal : arr -> arr -> bool
val not_equal : arr -> arr -> bool
val less : arr -> arr -> bool
val greater : arr -> arr -> bool
val less_equal : arr -> arr -> bool
val greater_equal : arr -> arr -> bool
val approx_equal : ?eps:float -> arr -> arr -> bool
val approx_equal_scalar : ?eps:float -> arr -> elt -> bool
val approx_elt_equal : ?eps:float -> arr -> arr -> arr
val approx_elt_equal_scalar : ?eps:float -> arr -> elt -> arr
\ No newline at end of file diff --git a/owl-base/Owl_types_ndarray_eltcmp/.dummy b/owl-base/Owl_types_ndarray_eltcmp/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_types_ndarray_eltcmp/module-type-Sig/index.html b/owl-base/Owl_types_ndarray_eltcmp/module-type-Sig/index.html deleted file mode 100644 index 8f002d2ac..000000000 --- a/owl-base/Owl_types_ndarray_eltcmp/module-type-Sig/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -Sig (owl-base.Owl_types_ndarray_eltcmp.Sig)

Module type Owl_types_ndarray_eltcmp.Sig

include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
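The elt_* values above are the element-wise counterparts of comparison: each returns an arr mask of the same shape, holding 1 where the predicate holds and 0 elsewhere, rather than a single bool. A small sketch of combining such a mask with the reductions listed earlier; the concrete module satisfying this Sig is an assumption of the example:

module Compare (N : Owl_types_ndarray_eltcmp.Sig) = struct
  (* mask.(i) = 1 when a.(i) < b.(i), 0 otherwise *)
  let smaller_mask a b = N.elt_less a b

  (* number of entries of a strictly below the scalar threshold t *)
  let count_below a t = N.elt_to_float (N.sum' (N.elt_less_scalar a t))
end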
\ No newline at end of file
diff --git a/owl-base/Owl_types_ndarray_mutable/.dummy b/owl-base/Owl_types_ndarray_mutable/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl-base/Owl_types_ndarray_mutable/module-type-Sig/Linalg/index.html b/owl-base/Owl_types_ndarray_mutable/module-type-Sig/Linalg/index.html
deleted file mode 100644
index 208e5c108..000000000
--- a/owl-base/Owl_types_ndarray_mutable/module-type-Sig/Linalg/index.html
+++ /dev/null
@@ -1,6 +0,0 @@
Linalg (owl-base.Owl_types_ndarray_mutable.Sig.Linalg)

Module Sig.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
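A hedged sketch of the solver above in use: solve A x = b with linsolve and check the residual using dot, sub and l2norm' from the enclosing signature (shown further below in this patch). The functor instance is assumed, not specified by this page:

module Solve (M : Owl_types_ndarray_mutable.Sig) = struct
  (* x such that (dot a x) is approximately b *)
  let solve a b = M.Linalg.linsolve a b

  (* residual ||a x - b|| as a plain float, for a quick sanity check *)
  let residual a x b = M.elt_to_float (M.l2norm' (M.sub (M.dot a x) b))
end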
\ No newline at end of file
diff --git a/owl-base/Owl_types_ndarray_mutable/module-type-Sig/Mat/index.html b/owl-base/Owl_types_ndarray_mutable/module-type-Sig/Mat/index.html
deleted file mode 100644
index 5230c38fe..000000000
--- a/owl-base/Owl_types_ndarray_mutable/module-type-Sig/Mat/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Mat (owl-base.Owl_types_ndarray_mutable.Sig.Mat)

Module Sig.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
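These four matrix helpers compose directly with the parent signature; for instance a unit lower-triangular matrix can be assembled from eye and tril. A sketch, with the instance module assumed:

module MatUtil (M : Owl_types_ndarray_mutable.Sig) = struct
  (* identity of matching size plus the strictly lower triangle of a *)
  let unit_lower a = M.add (M.Mat.eye (M.row_num a)) (M.Mat.tril ~k:(-1) a)
end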
\ No newline at end of file
diff --git a/owl-base/Owl_types_ndarray_mutable/module-type-Sig/Scalar/index.html b/owl-base/Owl_types_ndarray_mutable/module-type-Sig/Scalar/index.html
deleted file mode 100644
index a8093bbbd..000000000
--- a/owl-base/Owl_types_ndarray_mutable/module-type-Sig/Scalar/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Scalar (owl-base.Owl_types_ndarray_mutable.Sig.Scalar)

Module Sig.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file
diff --git a/owl-base/Owl_types_ndarray_mutable/module-type-Sig/index.html b/owl-base/Owl_types_ndarray_mutable/module-type-Sig/index.html
deleted file mode 100644
index 2b8c5b971..000000000
--- a/owl-base/Owl_types_ndarray_mutable/module-type-Sig/index.html
+++ /dev/null
@@ -1,379 +0,0 @@
Sig (owl-base.Owl_types_ndarray_mutable.Sig)

Module type Owl_types_ndarray_mutable.Sig

include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
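The underscore-suffixed values above are the in-place half of this signature: each writes into the buffer named by out (or the mandatory ~out) instead of allocating a result. A minimal sketch of chaining two of them on one scratch buffer; the instance module is an assumption, and fma is taken to be the element-wise fused multiply-add x * y + z, as its type suggests:

module Step (M : Owl_types_ndarray_mutable.Sig) = struct
  (* y <- relu (x * w + y), reusing y as the only buffer *)
  let fused_step ~x ~w ~y =
    M.fma_ ~out:y x w y;   (* assumes fma computes x * w + y element-wise *)
    M.relu_ ~out:y y
end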
\ No newline at end of file
diff --git a/owl-base/Owl_types_ndarray_numdiff/.dummy b/owl-base/Owl_types_ndarray_numdiff/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl-base/Owl_types_ndarray_numdiff/module-type-Sig/index.html b/owl-base/Owl_types_ndarray_numdiff/module-type-Sig/index.html
deleted file mode 100644
index 6241d62bf..000000000
--- a/owl-base/Owl_types_ndarray_numdiff/module-type-Sig/index.html
+++ /dev/null
@@ -1,158 +0,0 @@
Sig (owl-base.Owl_types_ndarray_numdiff.Sig)

Module type Owl_types_ndarray_numdiff.Sig

include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val mapi : (int -> elt -> elt) -> arr -> arr
val (+) : arr -> arr -> arr
val (-) : arr -> arr -> arr
val (*) : arr -> arr -> arr
val (/) : arr -> arr -> arr
val (+$) : arr -> elt -> arr
val (-$) : arr -> elt -> arr
val (*$) : arr -> elt -> arr
val (/$) : arr -> elt -> arr
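This signature is small enough to drive a finite-difference routine directly: copy, get/set at an index, and elt/float conversion are all that a central difference needs. A sketch for a 1-d point x and a scalar-valued f (the step size h and the instance module are assumptions of this example), approximating df/dx_i as (f(x + h e_i) - f(x - h e_i)) / (2 h):

module Numdiff (N : Owl_types_ndarray_numdiff.Sig) = struct
  let grad ?(h = 1e-5) f x =
    (* evaluate f with the i-th coordinate of x shifted by s *)
    let eval_shifted i s =
      let x' = N.copy x in
      N.set x' [| i |] (N.float_to_elt (N.elt_to_float (N.get x [| i |]) +. s));
      N.elt_to_float (f x')
    in
    N.init (N.shape x) (fun i ->
        N.float_to_elt ((eval_shifted i h -. eval_shifted i (-. h)) /. (2. *. h)))
end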
\ No newline at end of file
diff --git a/owl-base/Owl_types_operator/.dummy b/owl-base/Owl_types_operator/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl-base/Owl_types_operator/module-type-BasicSig/index.html b/owl-base/Owl_types_operator/module-type-BasicSig/index.html
deleted file mode 100644
index d7928b893..000000000
--- a/owl-base/Owl_types_operator/module-type-BasicSig/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
BasicSig (owl-base.Owl_types_operator.BasicSig)

Module type Owl_types_operator.BasicSig

type ('a, 'b) t
val add : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val sub : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val mul : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val div : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val add_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val sub_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val mul_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val div_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val scalar_add : 'a -> ('a, 'b) t -> ('a, 'b) t
val scalar_sub : 'a -> ('a, 'b) t -> ('a, 'b) t
val scalar_mul : 'a -> ('a, 'b) t -> ('a, 'b) t
val scalar_div : 'a -> ('a, 'b) t -> ('a, 'b) t
val equal : ('a, 'b) t -> ('a, 'b) t -> bool
val not_equal : ('a, 'b) t -> ('a, 'b) t -> bool
val greater : ('a, 'b) t -> ('a, 'b) t -> bool
val less : ('a, 'b) t -> ('a, 'b) t -> bool
val greater_equal : ('a, 'b) t -> ('a, 'b) t -> bool
val less_equal : ('a, 'b) t -> ('a, 'b) t -> bool
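BasicSig captures just enough arithmetic and comparison to define infix operators generically over any ('a, 'b) t. An illustrative functor in that spirit (a sketch only, not Owl's own operator module, whose shape is not shown in this patch):

module Make_basic_ops (M : Owl_types_operator.BasicSig) = struct
  let ( + ) = M.add
  let ( - ) = M.sub
  let ( * ) = M.mul
  let ( / ) = M.div
  let ( +$ ) = M.add_scalar   (* array op scalar *)
  let ( $+ ) = M.scalar_add   (* scalar op array *)
  let ( = ) = M.equal
  let ( <> ) = M.not_equal
end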
\ No newline at end of file
diff --git a/owl-base/Owl_types_operator/module-type-ExtendSig/index.html b/owl-base/Owl_types_operator/module-type-ExtendSig/index.html
deleted file mode 100644
index be0c4f6a5..000000000
--- a/owl-base/Owl_types_operator/module-type-ExtendSig/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
ExtendSig (owl-base.Owl_types_operator.ExtendSig)

Module type Owl_types_operator.ExtendSig

type ('a, 'b) t
val equal_scalar : ('a, 'b) t -> 'a -> bool
val not_equal_scalar : ('a, 'b) t -> 'a -> bool
val less_scalar : ('a, 'b) t -> 'a -> bool
val greater_scalar : ('a, 'b) t -> 'a -> bool
val less_equal_scalar : ('a, 'b) t -> 'a -> bool
val greater_equal_scalar : ('a, 'b) t -> 'a -> bool
val elt_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val elt_not_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val elt_less : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val elt_greater : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val elt_less_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val elt_greater_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val elt_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val elt_not_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val elt_less_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val elt_greater_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val elt_less_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val elt_greater_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t
val fmod : (float, 'a) t -> (float, 'a) t -> (float, 'a) t
val fmod_scalar : (float, 'a) t -> float -> (float, 'a) t
val pow : (float, 'a) t -> (float, 'a) t -> (float, 'a) t
val scalar_pow : float -> (float, 'a) t -> (float, 'a) t
val pow_scalar : (float, 'a) t -> float -> (float, 'a) t
val approx_equal : ?eps:float -> ('a, 'b) t -> ('a, 'b) t -> bool
val approx_equal_scalar : ?eps:float -> ('a, 'b) t -> 'a -> bool
val approx_elt_equal : ?eps:float -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val approx_elt_equal_scalar : ?eps:float -> ('a, 'b) t -> 'a -> ('a, 'b) t
val add_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit
val sub_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit
val mul_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit
val div_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit
val add_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit
val sub_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit
val mul_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit
val div_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit
val concat_vertical : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val concat_horizontal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
val get_fancy_ext : Owl_types.index array -> ('a, 'b) t -> ('a, 'b) t
val set_fancy_ext : Owl_types.index array -> ('a, 'b) t -> ('a, 'b) t -> unit
val get_slice_ext : int list array -> ('a, 'b) t -> ('a, 'b) t
val set_slice_ext : int list array -> ('a, 'b) t -> ('a, 'b) t -> unit
\ No newline at end of file
diff --git a/owl-base/Owl_types_operator/module-type-LinalgSig/index.html b/owl-base/Owl_types_operator/module-type-LinalgSig/index.html
deleted file mode 100644
index 875946f31..000000000
--- a/owl-base/Owl_types_operator/module-type-LinalgSig/index.html
+++ /dev/null
@@ -1,7 +0,0 @@
LinalgSig (owl-base.Owl_types_operator.LinalgSig)

Module type Owl_types_operator.LinalgSig

type ('a, 'b) t
val mpow : ('a, 'b) t -> float -> ('a, 'b) t
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
\ No newline at end of file
diff --git a/owl-base/Owl_types_operator/module-type-MatrixSig/index.html b/owl-base/Owl_types_operator/module-type-MatrixSig/index.html
deleted file mode 100644
index 48c0b6e5d..000000000
--- a/owl-base/Owl_types_operator/module-type-MatrixSig/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
MatrixSig (owl-base.Owl_types_operator.MatrixSig)

Module type Owl_types_operator.MatrixSig

type ('a, 'b) t
val get : ('a, 'b) t -> int -> int -> 'a
val set : ('a, 'b) t -> int -> int -> 'a -> unit
val dot : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t
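MatrixSig asks only for element access and dot, so anything built over it must take the matrix dimensions from the caller. A tiny sketch with an assumed instance module; the helper name is illustrative:

module Mat_ext (M : Owl_types_operator.MatrixSig) = struct
  (* the diagonal of an n x n matrix, as a list of its elements *)
  let diag n x = List.init n (fun i -> M.get x i i)
end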
\ No newline at end of file
diff --git a/owl-base/Owl_types_operator/module-type-NdarraySig/index.html b/owl-base/Owl_types_operator/module-type-NdarraySig/index.html
deleted file mode 100644
index 84fa1a51e..000000000
--- a/owl-base/Owl_types_operator/module-type-NdarraySig/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
NdarraySig (owl-base.Owl_types_operator.NdarraySig)

Module type Owl_types_operator.NdarraySig

type ('a, 'b) t
val get : ('a, 'b) t -> int array -> 'a
val set : ('a, 'b) t -> int array -> 'a -> unit
\ No newline at end of file
diff --git a/owl-base/Owl_types_stats_basic/.dummy b/owl-base/Owl_types_stats_basic/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl-base/Owl_types_stats_dist/.dummy b/owl-base/Owl_types_stats_dist/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl-base/Owl_types_stats_dist/module-type-Sig/Linalg/index.html b/owl-base/Owl_types_stats_dist/module-type-Sig/Linalg/index.html
deleted file mode 100644
index 3b59c5393..000000000
--- a/owl-base/Owl_types_stats_dist/module-type-Sig/Linalg/index.html
+++ /dev/null
@@ -1,6 +0,0 @@
Linalg (owl-base.Owl_types_stats_dist.Sig.Linalg)

Module Sig.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file
diff --git a/owl-base/Owl_types_stats_dist/module-type-Sig/Mat/index.html b/owl-base/Owl_types_stats_dist/module-type-Sig/Mat/index.html
deleted file mode 100644
index 5136a8dd1..000000000
--- a/owl-base/Owl_types_stats_dist/module-type-Sig/Mat/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Mat (owl-base.Owl_types_stats_dist.Sig.Mat)

Module Sig.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file
diff --git a/owl-base/Owl_types_stats_dist/module-type-Sig/Scalar/index.html b/owl-base/Owl_types_stats_dist/module-type-Sig/Scalar/index.html
deleted file mode 100644
index 43de7e952..000000000
--- a/owl-base/Owl_types_stats_dist/module-type-Sig/Scalar/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Scalar (owl-base.Owl_types_stats_dist.Sig.Scalar)

Module Sig.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file
diff --git a/owl-base/Owl_types_stats_dist/module-type-Sig/index.html b/owl-base/Owl_types_stats_dist/module-type-Sig/index.html
deleted file mode 100644
index 9f46b9bdc..000000000
--- a/owl-base/Owl_types_stats_dist/module-type-Sig/index.html
+++ /dev/null
@@ -1,379 +0,0 @@
Sig (owl-base.Owl_types_stats_dist.Sig)

Module type Owl_types_stats_dist.Sig

include Owl_types_ndarray_mutable.Sig
include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
val uniform_rvs : a:arr -> b:arr -> n:int -> arr
val uniform_pdf : a:arr -> b:arr -> arr -> arr
val uniform_logpdf : a:arr -> b:arr -> arr -> arr
val uniform_cdf : a:arr -> b:arr -> arr -> arr
val uniform_logcdf : a:arr -> b:arr -> arr -> arr
val uniform_ppf : a:arr -> b:arr -> arr -> arr
val uniform_sf : a:arr -> b:arr -> arr -> arr
val uniform_logsf : a:arr -> b:arr -> arr -> arr
val uniform_isf : a:arr -> b:arr -> arr -> arr
val gaussian_rvs : mu:arr -> sigma:arr -> n:int -> arr
val gaussian_pdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logpdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_cdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logcdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_ppf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_sf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logsf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_isf : mu:arr -> sigma:arr -> arr -> arr
val exponential_rvs : lambda:arr -> n:int -> arr
val exponential_pdf : lambda:arr -> arr -> arr
val exponential_logpdf : lambda:arr -> arr -> arr
val exponential_cdf : lambda:arr -> arr -> arr
val exponential_logcdf : lambda:arr -> arr -> arr
val exponential_ppf : lambda:arr -> arr -> arr
val exponential_sf : lambda:arr -> arr -> arr
val exponential_logsf : lambda:arr -> arr -> arr
val exponential_isf : lambda:arr -> arr -> arr
val poisson_rvs : mu:arr -> n:int -> arr
val gamma_rvs : shape:arr -> scale:arr -> n:int -> arr
val gamma_pdf : shape:arr -> scale:arr -> arr -> arr
val gamma_logpdf : shape:arr -> scale:arr -> arr -> arr
val gamma_cdf : shape:arr -> scale:arr -> arr -> arr
val gamma_logcdf : shape:arr -> scale:arr -> arr -> arr
val gamma_ppf : shape:arr -> scale:arr -> arr -> arr
val gamma_sf : shape:arr -> scale:arr -> arr -> arr
val gamma_logsf : shape:arr -> scale:arr -> arr -> arr
val gamma_isf : shape:arr -> scale:arr -> arr -> arr
val beta_rvs : a:arr -> b:arr -> n:int -> arr
val beta_pdf : a:arr -> b:arr -> arr -> arr
val beta_logpdf : a:arr -> b:arr -> arr -> arr
val beta_cdf : a:arr -> b:arr -> arr -> arr
val beta_logcdf : a:arr -> b:arr -> arr -> arr
val beta_ppf : a:arr -> b:arr -> arr -> arr
val beta_sf : a:arr -> b:arr -> arr -> arr
val beta_logsf : a:arr -> b:arr -> arr -> arr
val beta_isf : a:arr -> b:arr -> arr -> arr
val chi2_rvs : df:arr -> n:int -> arr
val chi2_pdf : df:arr -> arr -> arr
val chi2_logpdf : df:arr -> arr -> arr
val chi2_cdf : df:arr -> arr -> arr
val chi2_logcdf : df:arr -> arr -> arr
val chi2_ppf : df:arr -> arr -> arr
val chi2_sf : df:arr -> arr -> arr
val chi2_logsf : df:arr -> arr -> arr
val chi2_isf : df:arr -> arr -> arr
val f_rvs : dfnum:arr -> dfden:arr -> n:int -> arr
val f_pdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logpdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_cdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logcdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_ppf : dfnum:arr -> dfden:arr -> arr -> arr
val f_sf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logsf : dfnum:arr -> dfden:arr -> arr -> arr
val f_isf : dfnum:arr -> dfden:arr -> arr -> arr
val cauchy_rvs : loc:arr -> scale:arr -> n:int -> arr
val cauchy_pdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logpdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_cdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logcdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_ppf : loc:arr -> scale:arr -> arr -> arr
val cauchy_sf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logsf : loc:arr -> scale:arr -> arr -> arr
val cauchy_isf : loc:arr -> scale:arr -> arr -> arr
val lomax_rvs : shape:arr -> scale:arr -> n:int -> arr
val lomax_pdf : shape:arr -> scale:arr -> arr -> arr
val lomax_logpdf : shape:arr -> scale:arr -> arr -> arr
val lomax_cdf : shape:arr -> scale:arr -> arr -> arr
val lomax_logcdf : shape:arr -> scale:arr -> arr -> arr
val lomax_ppf : shape:arr -> scale:arr -> arr -> arr
val lomax_sf : shape:arr -> scale:arr -> arr -> arr
val lomax_logsf : shape:arr -> scale:arr -> arr -> arr
val lomax_isf : shape:arr -> scale:arr -> arr -> arr
val weibull_rvs : shape:arr -> scale:arr -> n:int -> arr
val weibull_pdf : shape:arr -> scale:arr -> arr -> arr
val weibull_logpdf : shape:arr -> scale:arr -> arr -> arr
val weibull_cdf : shape:arr -> scale:arr -> arr -> arr
val weibull_logcdf : shape:arr -> scale:arr -> arr -> arr
val weibull_ppf : shape:arr -> scale:arr -> arr -> arr
val weibull_sf : shape:arr -> scale:arr -> arr -> arr
val weibull_logsf : shape:arr -> scale:arr -> arr -> arr
val weibull_isf : shape:arr -> scale:arr -> arr -> arr
val laplace_rvs : loc:arr -> scale:arr -> n:int -> arr
val laplace_pdf : loc:arr -> scale:arr -> arr -> arr
val laplace_logpdf : loc:arr -> scale:arr -> arr -> arr
val laplace_cdf : loc:arr -> scale:arr -> arr -> arr
val laplace_logcdf : loc:arr -> scale:arr -> arr -> arr
val laplace_ppf : loc:arr -> scale:arr -> arr -> arr
val laplace_sf : loc:arr -> scale:arr -> arr -> arr
val laplace_logsf : loc:arr -> scale:arr -> arr -> arr
val laplace_isf : loc:arr -> scale:arr -> arr -> arr
val gumbel1_rvs : a:arr -> b:arr -> n:int -> arr
val gumbel1_pdf : a:arr -> b:arr -> arr -> arr
val gumbel1_logpdf : a:arr -> b:arr -> arr -> arr
val gumbel1_cdf : a:arr -> b:arr -> arr -> arr
val gumbel1_logcdf : a:arr -> b:arr -> arr -> arr
val gumbel1_ppf : a:arr -> b:arr -> arr -> arr
val gumbel1_sf : a:arr -> b:arr -> arr -> arr
val gumbel1_logsf : a:arr -> b:arr -> arr -> arr
val gumbel1_isf : a:arr -> b:arr -> arr -> arr
val gumbel2_rvs : a:arr -> b:arr -> n:int -> arr
val gumbel2_pdf : a:arr -> b:arr -> arr -> arr
val gumbel2_logpdf : a:arr -> b:arr -> arr -> arr
val gumbel2_cdf : a:arr -> b:arr -> arr -> arr
val gumbel2_logcdf : a:arr -> b:arr -> arr -> arr
val gumbel2_ppf : a:arr -> b:arr -> arr -> arr
val gumbel2_sf : a:arr -> b:arr -> arr -> arr
val gumbel2_logsf : a:arr -> b:arr -> arr -> arr
val gumbel2_isf : a:arr -> b:arr -> arr -> arr
val logistic_rvs : loc:arr -> scale:arr -> n:int -> arr
val logistic_pdf : loc:arr -> scale:arr -> arr -> arr
val logistic_logpdf : loc:arr -> scale:arr -> arr -> arr
val logistic_cdf : loc:arr -> scale:arr -> arr -> arr
val logistic_logcdf : loc:arr -> scale:arr -> arr -> arr
val logistic_ppf : loc:arr -> scale:arr -> arr -> arr
val logistic_sf : loc:arr -> scale:arr -> arr -> arr
val logistic_logsf : loc:arr -> scale:arr -> arr -> arr
val logistic_isf : loc:arr -> scale:arr -> arr -> arr
val lognormal_rvs : mu:arr -> sigma:arr -> n:int -> arr
val lognormal_pdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logpdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_cdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logcdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_ppf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_sf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logsf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_isf : mu:arr -> sigma:arr -> arr -> arr
val rayleigh_rvs : sigma:arr -> n:int -> arr
val rayleigh_pdf : sigma:arr -> arr -> arr
val rayleigh_logpdf : sigma:arr -> arr -> arr
val rayleigh_cdf : sigma:arr -> arr -> arr
val rayleigh_logcdf : sigma:arr -> arr -> arr
val rayleigh_ppf : sigma:arr -> arr -> arr
val rayleigh_sf : sigma:arr -> arr -> arr
val rayleigh_logsf : sigma:arr -> arr -> arr
val rayleigh_isf : sigma:arr -> arr -> arr
\ No newline at end of file diff --git a/owl-base/Owl_utils/.dummy b/owl-base/Owl_utils/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_utils_array/.dummy b/owl-base/Owl_utils_array/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_utils_heap/.dummy b/owl-base/Owl_utils_heap/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_utils_infer_shape/.dummy b/owl-base/Owl_utils_infer_shape/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_utils_multimap/.dummy b/owl-base/Owl_utils_multimap/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_utils_multimap/Make/index.html b/owl-base/Owl_utils_multimap/Make/index.html deleted file mode 100644 index a8015b2eb..000000000 --- a/owl-base/Owl_utils_multimap/Make/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Make (owl-base.Owl_utils_multimap.Make)

Module Owl_utils_multimap.Make

Parameters

module Ord : Stdlib.Map.OrderedType

Signature

Type definition
type key = Ord.t

Type of the multimap keys.

type 'a t

Type of a multimap.

Basic functions
val empty : 'a t

The empty multimap.

val is_empty : 'a t -> bool

Check whether the multimap is empty.

val mem : key -> 'a t -> bool

mem k m returns true if the multimap m contains at least one binding for k, and false otherwise.

val add : key -> 'a -> 'a t -> 'a t

add k v m returns a multimap containing the same bindings as m, plus a binding from k to v. Previous bindings for k are hidden by the new binding (they can be restored by calling remove k m).

val remove : key -> 'a t -> 'a t

remove k m returns a multimap with the same bindings as m, except for the binding of k: the last value that was bound to it is removed. If there is no binding for k in m, it raises Not_found.

val find : key -> 'a t -> 'a

find k m returns the last added binding of k in m, or raises Not_found if there is no such binding.

val max_binding : 'a t -> key * 'a

max_binding m returns the greatest binding in m. Raises Not_found if m is empty.

val find_first_opt : (key -> bool) -> 'a t -> (key * 'a) option

find_first_opt f m returns the first binding (k, v) such that f k, or None if no such binding exists. The function f has to be nondecreasing. Time complexity is O(log n).
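
The snippet below is a minimal usage sketch of the functions documented above. The Int argument and the concrete keys and values are illustrative assumptions, not part of this interface.

module M = Owl_utils_multimap.Make (Int)

let () =
  (* add hides any previous binding for the same key; remove restores it *)
  let m = M.empty |> M.add 1 "a" |> M.add 1 "b" |> M.add 2 "c" in
  assert (M.mem 1 m);
  assert (M.find 1 m = "b");         (* the most recently added binding for key 1 *)
  let m' = M.remove 1 m in
  assert (M.find 1 m' = "a");        (* the earlier binding is visible again *)
  assert (fst (M.max_binding m) = 2) (* greatest key currently bound in m *)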

\ No newline at end of file diff --git a/owl-base/Owl_utils_ndarray/.dummy b/owl-base/Owl_utils_ndarray/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_utils_stack/.dummy b/owl-base/Owl_utils_stack/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_view/.dummy b/owl-base/Owl_view/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl-base/Owl_view/Make/argument-1-A/index.html b/owl-base/Owl_view/Make/argument-1-A/index.html deleted file mode 100644 index 220c11810..000000000 --- a/owl-base/Owl_view/Make/argument-1-A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl-base.Owl_view.Make.A)

Parameter Make.A

include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
\ No newline at end of file diff --git a/owl-base/Owl_view/Make/index.html b/owl-base/Owl_view/Make/index.html deleted file mode 100644 index f2aca2b4c..000000000 --- a/owl-base/Owl_view/Make/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Make (owl-base.Owl_view.Make)

Module Owl_view.Make

Parameters

Signature

Type definition
type t

t is the abstract type representing a view atop an ndarray.

Conversion functions
val of_arr : A.arr -> t

of_arr x creates a view from ndarray x.

val to_arr : t -> A.arr

to_arr x creates a new ndarray based on the view x.

Manipulation functions
val get : t -> int array -> A.elt

Refer to :doc:`owl_dense_ndarray_generic`

val set : t -> int array -> A.elt -> unit

Refer to :doc:`owl_dense_ndarray_generic`

val get_slice : int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_slice : int list list -> t -> t -> unit

Refer to :doc:`owl_dense_ndarray_generic`

val shape : t -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val num_dims : t -> int

Refer to :doc:`owl_dense_ndarray_generic`

val nth_dim : t -> int -> int

Refer to :doc:`owl_dense_ndarray_generic`

val numel : t -> int

Refer to :doc:`owl_dense_ndarray_generic`
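
As a concrete illustration of the conversion and manipulation functions above, the following sketch assumes the functor has been applied to an ndarray module such as Owl_base_dense_ndarray_d; the module choice and the shapes are assumptions, not requirements of this interface.

module V = Owl_view.Make (Owl_base_dense_ndarray_d)

let demo () =
  let x = Owl_base_dense_ndarray_d.sequential [| 3; 4 |] in
  let v = V.of_arr x in                     (* view over the whole of x *)
  let s = V.get_slice [ [ 0; 1 ]; [] ] v in (* sub-view covering the first two rows *)
  V.set s [| 0; 0 |] 42.;                   (* write a single element through the view *)
  Printf.printf "dims = %d, numel = %d\n" (V.num_dims s) (V.numel s);
  V.to_arr s                                (* materialise the sub-view as a new ndarray *)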

Iteration functions
val iteri : (int -> A.elt -> unit) -> t -> unit

iteri f x iterates over and applies f to every element in x. f has type int -> elt -> unit; the first parameter is the index. 1-d indices are passed to the user function.

val iter : (A.elt -> unit) -> t -> unit

Similar to iteri, but the index is not passed in.

val mapi : (int -> A.elt -> A.elt) -> t -> unit

mapi f x applies f : int -> elt -> elt to every element in x, then saves the result in place. 1-d indices are passed to the user function.

val map : (A.elt -> A.elt) -> t -> unit

map f x applies f : elt -> elt to every element in x, then saves the result in place in x.

val iter2 : (A.elt -> A.elt -> unit) -> t -> t -> unit

iter2 f x y applies f : elt -> elt -> unit to every pair of elements in x and y. The indices are not passed to the user function.

val map2 : (A.elt -> A.elt -> A.elt) -> t -> t -> unit

map2 f x y applies f : elt -> elt -> elt to every pair of elements in x and y, then saves the result in y. Be careful with the argument order: it matters, because the data reflected by view y will be modified.

val iteri_nd : (int array -> A.elt -> unit) -> t -> unit

Similar to `iteri` but n-d indices are passed in. This function is much slower than `iteri`.

val mapi_nd : (int array -> A.elt -> A.elt) -> t -> unit

Similar to `mapi` but n-d indices are passed in. This function is much slower than `mapi`.
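
Continuing the sketch from the manipulation section above (module V and float elements are assumed), the fragment below illustrates the in-place behaviour of map, map2 and iteri.

let double_then_accumulate v s =
  V.map (fun a -> a *. 2.) v;                          (* every element reflected by v is doubled in place *)
  V.map2 (fun a b -> a +. b) v s;                      (* the sums are written into the second view, s *)
  V.iteri (fun i a -> Printf.printf "%d: %g\n" i a) s  (* 1-d indices are passed to the callback *)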

Examination & Comparison
val exists : (A.elt -> bool) -> t -> bool

exists f x checks all the elements in x using f. If at least one element satisfies f, then the function returns true, otherwise false.

val not_exists : (A.elt -> bool) -> t -> bool

not_exists f x checks all the elements in x; the function returns true only if all the elements fail to satisfy f.

val for_all : (A.elt -> bool) -> t -> bool

for_all f x checks all the elements in x; the function returns true if and only if all the elements pass the check of function f.

val equal : t -> t -> bool

equal x y returns true if x and y are elementwise equal.

val not_equal : t -> t -> bool

not_equal x y returns true if x and y are not elementwise equal.
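
A few hypothetical predicates built on the examination functions above, again assuming float elements.

let all_nonneg v = V.for_all (fun a -> a >= 0.) v    (* true iff no negative element *)
let has_nan v = V.exists Float.is_nan v              (* true iff at least one NaN *)
let same_content v s = V.equal v s                   (* elementwise equality of two views *)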

\ No newline at end of file diff --git a/owl-top/Owl_top/.dummy b/owl-top/Owl_top/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl/.dummy b/owl/Owl/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl/Arr/index.html b/owl/Owl/Arr/index.html deleted file mode 100644 index 06605de1b..000000000 --- a/owl/Owl/Arr/index.html +++ /dev/null @@ -1,579 +0,0 @@ - -Arr (owl.Owl.Arr)

Module Owl.Arr

include module type of struct include Owl_dense.Ndarray.D end
include module type of struct include Owl_dense_ndarray_d end
type elt = float
type arr = (float, Stdlib.Bigarray.float64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
include Owl_dense_ndarray_intf.Common with type elt := elt and type arr := arr
include Owl_base_dense_ndarray_intf.Common with type elt := elt with type arr := arr
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:float -> int array -> arr
val shape : arr -> int array
val numel : arr -> int
val strides : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val slice_size : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val flatten : arr -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val squeeze : ?axis:int array -> arr -> arr
val expand : ?hi:bool -> arr -> int -> arr
val split : ?axis:int -> int array -> arr -> arr array
val draw : ?axis:int -> arr -> int -> arr * int array
val pad : ?v:elt -> int list list -> arr -> arr
val one_hot : int -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
Iterate array elements
val iteri : (int -> elt -> unit) -> arr -> unit
val iter : (elt -> unit) -> arr -> unit
val mapi : (int -> elt -> elt) -> arr -> arr
val map : (elt -> elt) -> arr -> arr
val filteri : (int -> elt -> bool) -> arr -> int array
val filter : (elt -> bool) -> arr -> int array
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
Examination & Comparison
val exists : (elt -> bool) -> arr -> bool
val not_exists : (elt -> bool) -> arr -> bool
val for_all : (elt -> bool) -> arr -> bool
val is_zero : arr -> bool
val is_positive : arr -> bool
val is_negative : arr -> bool
val is_nonpositive : arr -> bool
val is_nonnegative : arr -> bool
val is_normal : arr -> bool
val not_nan : arr -> bool
val not_inf : arr -> bool
val equal : arr -> arr -> bool
val not_equal : arr -> arr -> bool
val greater : arr -> arr -> bool
val less : arr -> arr -> bool
val greater_equal : arr -> arr -> bool
val less_equal : arr -> arr -> bool
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val equal_scalar : arr -> elt -> bool
val not_equal_scalar : arr -> elt -> bool
val less_scalar : arr -> elt -> bool
val greater_scalar : arr -> elt -> bool
val less_equal_scalar : arr -> elt -> bool
val greater_equal_scalar : arr -> elt -> bool
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val diag : ?k:int -> arr -> arr
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
Create N-dimensional array
val linspace : elt -> elt -> int -> arr

linspace 0. 9. 10 ...

val logspace : ?base:float -> elt -> elt -> int -> arr

logspace 0. 9. 10 ...

val unit_basis : int -> int -> arr

unit_basis n i returns a unit basis vector with the ith element set to 1.
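
A short sketch of these creation helpers; the commented values assume the usual linspace/logspace conventions and are illustrative only.

let () =
  let x = Owl.Arr.linspace 0. 9. 10 in          (* 10 evenly spaced points from 0. to 9. *)
  let y = Owl.Arr.logspace ~base:10. 0. 3. 4 in (* assumed: 1., 10., 100., 1000. *)
  let e = Owl.Arr.unit_basis 5 2 in             (* unit vector with the element at index 2 set to 1. *)
  Owl.Arr.print x;
  Owl.Arr.print y;
  Owl.Arr.print e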

Obtain basic properties
val num_dims : arr -> int
val nth_dim : arr -> int -> int
val nnz : arr -> int
val density : arr -> float
val size_in_bytes : arr -> int
val same_shape : arr -> arr -> bool
val same_data : arr -> arr -> bool
val ind : arr -> int -> int array
val i1d : arr -> int array -> int
Manipulate a N-dimensional array
val get_index : arr -> int array array -> elt array
val set_index : arr -> int array array -> elt array -> unit
val get_fancy : Owl_types.index list -> arr -> arr
val set_fancy : Owl_types.index list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val sub_ndarray : int array -> arr -> arr array
val slice_left : arr -> int array -> arr
val fill : arr -> elt -> unit
val resize : ?head:bool -> arr -> int array -> arr
val flip : ?axis:int -> arr -> arr
val rotate : arr -> int -> arr
val swap : int -> int -> arr -> arr
val concat_vertical : arr -> arr -> arr
val concat_horizontal : arr -> arr -> arr
val concat_vh : arr array array -> arr
val split_vh : (int * int) array array -> arr -> arr array array
val dropout : ?rate:float -> arr -> arr
val top : arr -> int -> int array array
val bottom : arr -> int -> int array array
val sort : arr -> arr
val sort1 : ?axis:int -> arr -> arr
val argsort : arr -> (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
val mmap : Unix.file_descr -> ?pos:int64 -> bool -> int array -> arr
Iterate array elements
val iter2i : (int -> elt -> elt -> unit) -> arr -> arr -> unit
val iter2 : (elt -> elt -> unit) -> arr -> arr -> unit
val map2i : (int -> elt -> elt -> elt) -> arr -> arr -> arr
val map2 : (elt -> elt -> elt) -> arr -> arr -> arr
val iteri_nd : (int array -> elt -> unit) -> arr -> unit
val mapi_nd : (int array -> elt -> elt) -> arr -> arr
val foldi_nd : ?axis:int -> (int array -> elt -> elt -> elt) -> elt -> arr -> arr
val scani_nd : ?axis:int -> (int array -> elt -> elt -> elt) -> arr -> arr
val filteri_nd : (int array -> elt -> bool) -> arr -> int array array
val iter2i_nd : (int array -> elt -> elt -> unit) -> arr -> arr -> unit
val map2i_nd : (int array -> elt -> elt -> elt) -> arr -> arr -> arr
val iteri_slice : ?axis:int -> (int -> arr -> unit) -> arr -> unit
val iter_slice : ?axis:int -> (arr -> unit) -> arr -> unit
val mapi_slice : ?axis:int -> (int -> arr -> 'c) -> arr -> 'c array
val map_slice : ?axis:int -> (arr -> 'c) -> arr -> 'c array
val filteri_slice : ?axis:int -> (int -> arr -> bool) -> arr -> arr array
val filter_slice : ?axis:int -> (arr -> bool) -> arr -> arr array
val foldi_slice : ?axis:int -> (int -> 'c -> arr -> 'c) -> 'c -> arr -> 'c
val fold_slice : ?axis:int -> ('c -> arr -> 'c) -> 'c -> arr -> 'c
Examine array elements or compare two arrays
Input/Output functions
val to_array : arr -> elt array
val save : out:string -> arr -> unit
val load : string -> arr
val save_npy : out:string -> arr -> unit
val load_npy : string -> arr
Unary mathematical operations
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod' : arr -> elt
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean' : arr -> elt
val median' : arr -> elt
val median : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var' : arr -> elt
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std' : arr -> elt
val sem : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sem' : arr -> elt
val minmax : ?axis:int -> ?keep_dims:bool -> arr -> arr * arr
val minmax' : arr -> elt * elt
val min_i : arr -> elt * int array
val max_i : arr -> elt * int array
val minmax_i : arr -> (elt * int array) * (elt * int array)
val abs2 : arr -> arr
val conj : arr -> arr
val reci : arr -> arr
val reci_tol : ?tol:elt -> arr -> arr
val cbrt : arr -> arr
val exp2 : arr -> arr
val exp10 : arr -> arr
val expm1 : arr -> arr
val log1p : arr -> arr
val trunc : arr -> arr
val fix : arr -> arr
val modf : arr -> arr * arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm_sqr : ?axis:int -> ?keep_dims:bool -> arr -> arr
val vecnorm : ?axis:int -> ?p:float -> ?keep_dims:bool -> arr -> arr
val vecnorm' : ?p:float -> arr -> elt
val cumsum : ?axis:int -> arr -> arr
val cumprod : ?axis:int -> arr -> arr
val cummin : ?axis:int -> arr -> arr
val cummax : ?axis:int -> arr -> arr
val diff : ?axis:int -> ?n:int -> arr -> arr
val lgamma : arr -> arr
Binary mathematical operations
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val ssqr' : arr -> elt -> elt
val ssqr_diff' : arr -> arr -> elt
Tensor Calculus
val contract1 : (int * int) array -> arr -> arr
val contract2 : (int * int) array -> arr -> arr -> arr
Experimental functions
val slide : ?axis:int -> ?ofs:int -> ?step:int -> window:int -> arr -> arr
Functions of in-place modification
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:float -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val sort_ : arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_fancy_ : out:arr -> Owl_types.index list -> arr -> unit
val set_fancy_ : out:arr -> Owl_types.index list -> arr -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit

Matrix functions

val col : arr -> int -> arr
val cols : arr -> int array -> arr
val to_arrays : arr -> elt array array
val draw_rows : ?replacement:bool -> arr -> int -> arr * int array
val draw_cols : ?replacement:bool -> arr -> int -> arr * int array
val draw_rows2 : ?replacement:bool -> arr -> arr -> int -> arr * arr * int array
val draw_cols2 : ?replacement:bool -> arr -> arr -> int -> arr * arr * int array
include Owl_dense_ndarray_intf.Real with type elt := elt and type arr := arr
include Owl_base_dense_ndarray_intf.Real with type elt := elt with type arr := arr
val sum_slices : ?axis:int -> arr -> arr
val signum : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val atan2 : arr -> arr -> arr
val approx_equal : ?eps:float -> arr -> arr -> bool
val approx_equal_scalar : ?eps:float -> arr -> float -> bool
val approx_elt_equal : ?eps:float -> arr -> arr -> arr
val approx_elt_equal_scalar : ?eps:float -> arr -> float -> arr
val dot : arr -> arr -> arr
val trace : arr -> elt
Helper functions
val float_to_elt : float -> elt
val elt_to_float : elt -> float
Real operations
val i0 : arr -> arr
val i0e : arr -> arr
val i1 : arr -> arr
val i1e : arr -> arr
val iv : v:arr -> arr -> arr
val scalar_iv : v:elt -> arr -> arr
val iv_scalar : v:arr -> elt -> arr
val j0 : arr -> arr
val j1 : arr -> arr
val jv : v:arr -> arr -> arr
val scalar_jv : v:elt -> arr -> arr
val jv_scalar : v:arr -> elt -> arr
val erf : arr -> arr
val erfc : arr -> arr
val logistic : arr -> arr
val elu : ?alpha:elt -> arr -> arr
val leaky_relu : ?alpha:elt -> arr -> arr
val softplus : arr -> arr
val softsign : arr -> arr
val softmax : ?axis:int -> arr -> arr
val sigmoid : arr -> arr
val log_sum_exp' : arr -> float
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val fmod_scalar : arr -> elt -> arr
val scalar_fmod : elt -> arr -> arr
val cross_entropy' : arr -> arr -> float
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
val poisson : mu:elt -> int array -> arr
val poisson_ : mu:elt -> out:arr -> unit
include Owl_dense_ndarray_intf.NN with type arr := arr
include Owl_base_dense_ndarray_intf.NN with type arr := arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val max_pool2d_argmax : ?padding:Owl_types.padding -> arr -> int array -> int array -> arr * (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
val conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
include Owl_dense_ndarray_intf.Distribution with type arr := arr
Stats & distribution functions
val uniform_rvs : a:arr -> b:arr -> n:int -> arr
val uniform_pdf : a:arr -> b:arr -> arr -> arr
val uniform_logpdf : a:arr -> b:arr -> arr -> arr
val uniform_cdf : a:arr -> b:arr -> arr -> arr
val uniform_logcdf : a:arr -> b:arr -> arr -> arr
val uniform_ppf : a:arr -> b:arr -> arr -> arr
val uniform_sf : a:arr -> b:arr -> arr -> arr
val uniform_logsf : a:arr -> b:arr -> arr -> arr
val uniform_isf : a:arr -> b:arr -> arr -> arr
val gaussian_rvs : mu:arr -> sigma:arr -> n:int -> arr
val gaussian_pdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logpdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_cdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logcdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_ppf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_sf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logsf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_isf : mu:arr -> sigma:arr -> arr -> arr
val exponential_rvs : lambda:arr -> n:int -> arr
val exponential_pdf : lambda:arr -> arr -> arr
val exponential_logpdf : lambda:arr -> arr -> arr
val exponential_cdf : lambda:arr -> arr -> arr
val exponential_logcdf : lambda:arr -> arr -> arr
val exponential_ppf : lambda:arr -> arr -> arr
val exponential_sf : lambda:arr -> arr -> arr
val exponential_logsf : lambda:arr -> arr -> arr
val exponential_isf : lambda:arr -> arr -> arr
val gamma_rvs : shape:arr -> scale:arr -> n:int -> arr
val gamma_pdf : shape:arr -> scale:arr -> arr -> arr
val gamma_logpdf : shape:arr -> scale:arr -> arr -> arr
val gamma_cdf : shape:arr -> scale:arr -> arr -> arr
val gamma_logcdf : shape:arr -> scale:arr -> arr -> arr
val gamma_ppf : shape:arr -> scale:arr -> arr -> arr
val gamma_sf : shape:arr -> scale:arr -> arr -> arr
val gamma_logsf : shape:arr -> scale:arr -> arr -> arr
val gamma_isf : shape:arr -> scale:arr -> arr -> arr
val beta_rvs : a:arr -> b:arr -> n:int -> arr
val beta_pdf : a:arr -> b:arr -> arr -> arr
val beta_logpdf : a:arr -> b:arr -> arr -> arr
val beta_cdf : a:arr -> b:arr -> arr -> arr
val beta_logcdf : a:arr -> b:arr -> arr -> arr
val beta_ppf : a:arr -> b:arr -> arr -> arr
val beta_sf : a:arr -> b:arr -> arr -> arr
val beta_logsf : a:arr -> b:arr -> arr -> arr
val beta_isf : a:arr -> b:arr -> arr -> arr
val chi2_rvs : df:arr -> n:int -> arr
val chi2_pdf : df:arr -> arr -> arr
val chi2_logpdf : df:arr -> arr -> arr
val chi2_cdf : df:arr -> arr -> arr
val chi2_logcdf : df:arr -> arr -> arr
val chi2_ppf : df:arr -> arr -> arr
val chi2_sf : df:arr -> arr -> arr
val chi2_logsf : df:arr -> arr -> arr
val chi2_isf : df:arr -> arr -> arr
val f_rvs : dfnum:arr -> dfden:arr -> n:int -> arr
val f_pdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logpdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_cdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logcdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_ppf : dfnum:arr -> dfden:arr -> arr -> arr
val f_sf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logsf : dfnum:arr -> dfden:arr -> arr -> arr
val f_isf : dfnum:arr -> dfden:arr -> arr -> arr
val cauchy_rvs : loc:arr -> scale:arr -> n:int -> arr
val cauchy_pdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logpdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_cdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logcdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_ppf : loc:arr -> scale:arr -> arr -> arr
val cauchy_sf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logsf : loc:arr -> scale:arr -> arr -> arr
val cauchy_isf : loc:arr -> scale:arr -> arr -> arr
val lomax_rvs : shape:arr -> scale:arr -> n:int -> arr
val lomax_pdf : shape:arr -> scale:arr -> arr -> arr
val lomax_logpdf : shape:arr -> scale:arr -> arr -> arr
val lomax_cdf : shape:arr -> scale:arr -> arr -> arr
val lomax_logcdf : shape:arr -> scale:arr -> arr -> arr
val lomax_ppf : shape:arr -> scale:arr -> arr -> arr
val lomax_sf : shape:arr -> scale:arr -> arr -> arr
val lomax_logsf : shape:arr -> scale:arr -> arr -> arr
val lomax_isf : shape:arr -> scale:arr -> arr -> arr
val weibull_rvs : shape:arr -> scale:arr -> n:int -> arr
val weibull_pdf : shape:arr -> scale:arr -> arr -> arr
val weibull_logpdf : shape:arr -> scale:arr -> arr -> arr
val weibull_cdf : shape:arr -> scale:arr -> arr -> arr
val weibull_logcdf : shape:arr -> scale:arr -> arr -> arr
val weibull_ppf : shape:arr -> scale:arr -> arr -> arr
val weibull_sf : shape:arr -> scale:arr -> arr -> arr
val weibull_logsf : shape:arr -> scale:arr -> arr -> arr
val weibull_isf : shape:arr -> scale:arr -> arr -> arr
val laplace_rvs : loc:arr -> scale:arr -> n:int -> arr
val laplace_pdf : loc:arr -> scale:arr -> arr -> arr
val laplace_logpdf : loc:arr -> scale:arr -> arr -> arr
val laplace_cdf : loc:arr -> scale:arr -> arr -> arr
val laplace_logcdf : loc:arr -> scale:arr -> arr -> arr
val laplace_ppf : loc:arr -> scale:arr -> arr -> arr
val laplace_sf : loc:arr -> scale:arr -> arr -> arr
val laplace_logsf : loc:arr -> scale:arr -> arr -> arr
val laplace_isf : loc:arr -> scale:arr -> arr -> arr
val gumbel1_rvs : a:arr -> b:arr -> n:int -> arr
val gumbel1_pdf : a:arr -> b:arr -> arr -> arr
val gumbel1_logpdf : a:arr -> b:arr -> arr -> arr
val gumbel1_cdf : a:arr -> b:arr -> arr -> arr
val gumbel1_logcdf : a:arr -> b:arr -> arr -> arr
val gumbel1_ppf : a:arr -> b:arr -> arr -> arr
val gumbel1_sf : a:arr -> b:arr -> arr -> arr
val gumbel1_logsf : a:arr -> b:arr -> arr -> arr
val gumbel1_isf : a:arr -> b:arr -> arr -> arr
val gumbel2_rvs : a:arr -> b:arr -> n:int -> arr
val gumbel2_pdf : a:arr -> b:arr -> arr -> arr
val gumbel2_logpdf : a:arr -> b:arr -> arr -> arr
val gumbel2_cdf : a:arr -> b:arr -> arr -> arr
val gumbel2_logcdf : a:arr -> b:arr -> arr -> arr
val gumbel2_ppf : a:arr -> b:arr -> arr -> arr
val gumbel2_sf : a:arr -> b:arr -> arr -> arr
val gumbel2_logsf : a:arr -> b:arr -> arr -> arr
val gumbel2_isf : a:arr -> b:arr -> arr -> arr
val logistic_rvs : loc:arr -> scale:arr -> n:int -> arr
val logistic_pdf : loc:arr -> scale:arr -> arr -> arr
val logistic_logpdf : loc:arr -> scale:arr -> arr -> arr
val logistic_cdf : loc:arr -> scale:arr -> arr -> arr
val logistic_logcdf : loc:arr -> scale:arr -> arr -> arr
val logistic_ppf : loc:arr -> scale:arr -> arr -> arr
val logistic_sf : loc:arr -> scale:arr -> arr -> arr
val logistic_logsf : loc:arr -> scale:arr -> arr -> arr
val logistic_isf : loc:arr -> scale:arr -> arr -> arr
val lognormal_rvs : mu:arr -> sigma:arr -> n:int -> arr
val lognormal_pdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logpdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_cdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logcdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_ppf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_sf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logsf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_isf : mu:arr -> sigma:arr -> arr -> arr
val rayleigh_rvs : sigma:arr -> n:int -> arr
val rayleigh_pdf : sigma:arr -> arr -> arr
val rayleigh_logpdf : sigma:arr -> arr -> arr
val rayleigh_cdf : sigma:arr -> arr -> arr
val rayleigh_logcdf : sigma:arr -> arr -> arr
val rayleigh_ppf : sigma:arr -> arr -> arr
val rayleigh_sf : sigma:arr -> arr -> arr
val rayleigh_logsf : sigma:arr -> arr -> arr
val rayleigh_isf : sigma:arr -> arr -> arr
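The distribution interface above follows a consistent naming scheme: *_rvs draws samples, *_pdf and *_logpdf evaluate densities, and *_cdf, *_ppf, *_sf, *_isf give the distribution, quantile, survival and inverse-survival functions, all parameterised elementwise by ndarrays. A minimal usage sketch, assuming this page documents the double-precision ndarray module exposed as Owl.Arr:

  let () =
    let open Owl in
    let mu = Arr.zeros [| 2 |] in                      (* per-element means *)
    let sigma = Arr.ones [| 2 |] in                    (* per-element standard deviations *)
    let samples = Arr.gaussian_rvs ~mu ~sigma ~n:5 in  (* draw samples; ~n sets the sample count *)
    let p = Arr.gaussian_pdf ~mu ~sigma (Arr.zeros [| 2 |]) in  (* density evaluated at zero *)
    Arr.print samples;
    Arr.print p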
include module type of struct include Owl_dense_ndarray.Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (-$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (*$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (/$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($+) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($-) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($*) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($/) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (!=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<>) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (>) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (>=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
include sig ... end
val (=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (!=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<>$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (>$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (>=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (!=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<>.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (>.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (>=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=~) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (=~$) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (=~.) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=~.$) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (%) : (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val (%$) : (float, 'a) Owl_dense_ndarray_generic.t -> float -> (float, 'a) Owl_dense_ndarray_generic.t
val (**) : (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val ($**) : float -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val (**$) : (float, 'a) Owl_dense_ndarray_generic.t -> float -> (float, 'a) Owl_dense_ndarray_generic.t
val (+=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (-=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (*=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (/=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (+$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (-$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (*$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (/$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (.!{;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.!{;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (.${}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.${;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list array -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.${}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (.${;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list array -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
include sig ... end
val (.%{}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int -> 'a
val (.%{;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int array -> 'a -> unit
val mpow : Owl_linalg_d.mat -> float -> Owl_linalg_d.mat
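These operators mirror the named functions (for instance *$ is mul_scalar, += is the in-place addition, and .%{} / .${} give element and slice access). A short sketch, assuming the module is exposed as Owl.Arr and its operators are brought into scope with Arr.(...):

  let () =
    let open Owl in
    let x = Arr.sequential [| 2; 3 |] in
    let y = Arr.(x *$ 2.) in          (* scale every element by 2 *)
    Arr.(x += y);                     (* in-place elementwise addition *)
    let v = Arr.(x.%{1; 2}) in        (* read the element at index (1, 2) *)
    let r = Arr.(x.${[ 0; 0 ]}) in    (* slice the first row along axis 0 *)
    Printf.printf "x.(1,2) = %g\n" v;
    Arr.print r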
\ No newline at end of file
diff --git a/owl/Owl/Mat/index.html b/owl/Owl/Mat/index.html
deleted file mode 100644
index dbae98797..000000000
--- a/owl/Owl/Mat/index.html
+++ /dev/null
@@ -1,225 +0,0 @@
Mat (owl.Owl.Mat)

Module Owl.Mat

include module type of struct include Owl_dense.Matrix.D end
include module type of struct include Owl_dense_matrix_d end
type elt = float
type mat = (float, Stdlib.Bigarray.float64_elt) Owl_dense_matrix_generic.t
include Owl_dense_matrix_intf.Common with type elt := elt and type mat := mat
Create dense matrices
val empty : int -> int -> mat
val create : int -> int -> elt -> mat
val init : int -> int -> (int -> elt) -> mat
val init_2d : int -> int -> (int -> int -> elt) -> mat
val zeros : int -> int -> mat
val ones : int -> int -> mat
val eye : int -> mat
val sequential : ?a:elt -> ?step:elt -> int -> int -> mat
val uniform : ?a:elt -> ?b:elt -> int -> int -> mat
val gaussian : ?mu:elt -> ?sigma:elt -> int -> int -> mat
val bernoulli : ?p:float -> int -> int -> mat
val unit_basis : int -> int -> mat
val diagm : ?k:int -> mat -> mat
val triu : ?k:int -> mat -> mat
val tril : ?k:int -> mat -> mat
val symmetric : ?upper:bool -> mat -> mat
val bidiagonal : ?upper:bool -> mat -> mat -> mat
val toeplitz : ?c:mat -> mat -> mat
val hankel : ?r:mat -> mat -> mat
val hadamard : int -> mat
val magic : int -> mat
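A short sketch of the creation functions above; Owl.Mat is the double-precision matrix module this page documents:

  let () =
    let open Owl in
    let a = Mat.zeros 2 3 in                      (* 2 x 3 matrix of zeros *)
    let b = Mat.eye 3 in                          (* 3 x 3 identity *)
    let c = Mat.uniform ~a:0. ~b:1. 3 3 in        (* uniform samples between a and b *)
    let d = Mat.gaussian ~mu:0. ~sigma:2. 3 3 in  (* Gaussian samples with sigma = 2 *)
    Mat.print a; Mat.print b; Mat.print c; Mat.print d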
Dense row vectors and meshgrids
val vector : int -> mat
val vector_zeros : int -> mat
val vector_ones : int -> mat
val vector_uniform : int -> mat
val linspace : elt -> elt -> int -> mat
val logspace : ?base:float -> elt -> elt -> int -> mat
val meshgrid : elt -> elt -> elt -> elt -> int -> int -> mat * mat
val meshup : mat -> mat -> mat * mat
Obtain the basic properties of a matrix
val shape : mat -> int * int
val row_num : mat -> int
val col_num : mat -> int
val numel : mat -> int
val nnz : mat -> int
val density : mat -> float
val size_in_bytes : mat -> int
val same_shape : mat -> mat -> bool
val same_data : mat -> mat -> bool
Manipulate a matrix
val get : mat -> int -> int -> elt
val set : mat -> int -> int -> elt -> unit
val get_index : mat -> int array array -> elt array
val set_index : mat -> int array array -> elt array -> unit
val get_fancy : Owl_types.index list -> mat -> mat
val set_fancy : Owl_types.index list -> mat -> mat -> unit
val get_slice : int list list -> mat -> mat
val set_slice : int list list -> mat -> mat -> unit
val row : mat -> int -> mat
val col : mat -> int -> mat
val rows : mat -> int array -> mat
val cols : mat -> int array -> mat
val resize : ?head:bool -> mat -> int array -> mat
val reshape : mat -> int array -> mat
val flatten : mat -> mat
val reverse : mat -> mat
val flip : ?axis:int -> mat -> mat
val rotate : mat -> int -> mat
val reset : mat -> unit
val fill : mat -> elt -> unit
val copy : mat -> mat
val copy_row_to : mat -> mat -> int -> unit
val copy_col_to : mat -> mat -> int -> unit
val concat_vertical : mat -> mat -> mat
val concat_horizontal : mat -> mat -> mat
val concat_vh : mat array array -> mat
val concatenate : ?axis:int -> mat array -> mat
val split : ?axis:int -> int array -> mat -> mat array
val split_vh : (int * int) array array -> mat -> mat array array
val transpose : mat -> mat
val ctranspose : mat -> mat
val swap_rows : mat -> int -> int -> unit
val swap_cols : mat -> int -> int -> unit
val tile : mat -> int array -> mat
val repeat : mat -> int array -> mat
val pad : ?v:elt -> int list list -> mat -> mat
val dropout : ?rate:float -> mat -> mat
val top : mat -> int -> int array array
val bottom : mat -> int -> int array array
val sort : mat -> mat
val argsort : mat -> (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
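The manipulation functions above cover element access, slicing, reshaping and concatenation. A brief sketch:

  let () =
    let open Owl in
    let x = Mat.sequential 3 4 in
    Mat.set x 0 0 100.;                        (* in-place element update *)
    let r = Mat.row x 1 in                     (* row 1 as a 1 x 4 matrix *)
    let s = Mat.get_slice [ [0; 1]; [] ] x in  (* rows 0..1, all columns *)
    let xt = Mat.transpose x in
    let y = Mat.concat_horizontal x x in       (* 3 x 8 matrix *)
    Mat.print r; Mat.print s; Mat.print xt; Mat.print y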
Iterate elements, columns, and rows.
val iteri : (int -> elt -> unit) -> mat -> unit
val iter : (elt -> unit) -> mat -> unit
val mapi : (int -> elt -> elt) -> mat -> mat
val map : (elt -> elt) -> mat -> mat
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> mat -> mat
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> mat -> mat
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> mat -> mat
val scan : ?axis:int -> (elt -> elt -> elt) -> mat -> mat
val filteri : (int -> elt -> bool) -> mat -> int array
val filter : (elt -> bool) -> mat -> int array
val iteri_2d : (int -> int -> elt -> unit) -> mat -> unit
val mapi_2d : (int -> int -> elt -> elt) -> mat -> mat
val foldi_2d : ?axis:int -> (int -> int -> elt -> elt -> elt) -> elt -> mat -> mat
val scani_2d : ?axis:int -> (int -> int -> elt -> elt -> elt) -> mat -> mat
val filteri_2d : (int -> int -> elt -> bool) -> mat -> (int * int) array
val iter2i_2d : (int -> int -> elt -> elt -> unit) -> mat -> mat -> unit
val map2i_2d : (int -> int -> elt -> elt -> elt) -> mat -> mat -> mat
val iter2i : (int -> elt -> elt -> unit) -> mat -> mat -> unit
val iter2 : (elt -> elt -> unit) -> mat -> mat -> unit
val map2i : (int -> elt -> elt -> elt) -> mat -> mat -> mat
val map2 : (elt -> elt -> elt) -> mat -> mat -> mat
val iteri_rows : (int -> mat -> unit) -> mat -> unit
val iter_rows : (mat -> unit) -> mat -> unit
val iter2i_rows : (int -> mat -> mat -> unit) -> mat -> mat -> unit
val iter2_rows : (mat -> mat -> unit) -> mat -> mat -> unit
val iteri_cols : (int -> mat -> unit) -> mat -> unit
val iter_cols : (mat -> unit) -> mat -> unit
val filteri_rows : (int -> mat -> bool) -> mat -> int array
val filter_rows : (mat -> bool) -> mat -> int array
val filteri_cols : (int -> mat -> bool) -> mat -> int array
val filter_cols : (mat -> bool) -> mat -> int array
val fold_rows : ('a -> mat -> 'a) -> 'a -> mat -> 'a
val fold_cols : ('a -> mat -> 'a) -> 'a -> mat -> 'a
val mapi_rows : (int -> mat -> 'a) -> mat -> 'a array
val map_rows : (mat -> 'a) -> mat -> 'a array
val mapi_cols : (int -> mat -> 'a) -> mat -> 'a array
val map_cols : (mat -> 'a) -> mat -> 'a array
val mapi_by_row : int -> (int -> mat -> mat) -> mat -> mat
val map_by_row : int -> (mat -> mat) -> mat -> mat
val mapi_by_col : int -> (int -> mat -> mat) -> mat -> mat
val map_by_col : int -> (mat -> mat) -> mat -> mat
val mapi_at_row : (int -> elt -> elt) -> mat -> int -> mat
val map_at_row : (elt -> elt) -> mat -> int -> mat
val mapi_at_col : (int -> elt -> elt) -> mat -> int -> mat
val map_at_col : (elt -> elt) -> mat -> int -> mat
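The iteration helpers traverse a matrix elementwise or row/column-wise. A brief sketch:

  let () =
    let open Owl in
    let x = Mat.uniform 3 3 in
    let y = Mat.map (fun a -> a *. 2.) x in       (* elementwise doubling *)
    let idx = Mat.filter (fun a -> a > 0.5) x in  (* flat indices of elements greater than 0.5 *)
    Mat.iter_rows (fun r -> Mat.print r) x;       (* visit each row as a 1 x 3 matrix *)
    Printf.printf "%d elements exceed 0.5\n" (Array.length idx);
    Mat.print y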
Examine elements and compare two matrices
val exists : (elt -> bool) -> mat -> bool
val not_exists : (elt -> bool) -> mat -> bool
val for_all : (elt -> bool) -> mat -> bool
val is_zero : mat -> bool
val is_positive : mat -> bool
val is_negative : mat -> bool
val is_nonpositive : mat -> bool
val is_nonnegative : mat -> bool
val is_normal : mat -> bool
val not_nan : mat -> bool
val not_inf : mat -> bool
val equal : mat -> mat -> bool
val not_equal : mat -> mat -> bool
val greater : mat -> mat -> bool
val less : mat -> mat -> bool
val greater_equal : mat -> mat -> bool
val less_equal : mat -> mat -> bool
val elt_equal : mat -> mat -> mat
val elt_not_equal : mat -> mat -> mat
val elt_less : mat -> mat -> mat
val elt_greater : mat -> mat -> mat
val elt_less_equal : mat -> mat -> mat
val elt_greater_equal : mat -> mat -> mat
val equal_scalar : mat -> elt -> bool
val not_equal_scalar : mat -> elt -> bool
val less_scalar : mat -> elt -> bool
val greater_scalar : mat -> elt -> bool
val less_equal_scalar : mat -> elt -> bool
val greater_equal_scalar : mat -> elt -> bool
val elt_equal_scalar : mat -> elt -> mat
val elt_not_equal_scalar : mat -> elt -> mat
val elt_less_scalar : mat -> elt -> mat
val elt_greater_scalar : mat -> elt -> mat
val elt_less_equal_scalar : mat -> elt -> mat
val elt_greater_equal_scalar : mat -> elt -> mat
val approx_equal : ?eps:float -> mat -> mat -> bool
val approx_equal_scalar : ?eps:float -> mat -> elt -> bool
val approx_elt_equal : ?eps:float -> mat -> mat -> mat
val approx_elt_equal_scalar : ?eps:float -> mat -> elt -> mat
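The comparison functions come in whole-matrix (bool) and elementwise (0/1 mask) flavours, plus approximate variants that take a tolerance. A brief sketch:

  let () =
    let open Owl in
    let x = Mat.uniform 2 2 in
    let y = Mat.copy x in
    Printf.printf "equal: %b\n" (Mat.equal x y);
    Printf.printf "approx: %b\n" (Mat.approx_equal ~eps:1e-6 x y);
    Mat.print (Mat.elt_greater_scalar x 0.5)   (* 0/1 mask of elements greater than 0.5 *)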
Randomisation functions
val draw_rows : ?replacement:bool -> mat -> int -> mat * int array
val draw_cols : ?replacement:bool -> mat -> int -> mat * int array
val draw_rows2 : ?replacement:bool -> mat -> mat -> int -> mat * mat * int array
val draw_cols2 : ?replacement:bool -> mat -> mat -> int -> mat * mat * int array
val shuffle_rows : mat -> mat
val shuffle_cols : mat -> mat
val shuffle : mat -> mat
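A brief sketch of the randomisation helpers above:

  let () =
    let open Owl in
    let x = Mat.sequential 5 3 in
    let sample, idx = Mat.draw_rows ~replacement:false x 2 in  (* two distinct rows plus their indices *)
    Array.iter (Printf.printf "drew row %d\n") idx;
    Mat.print sample;
    Mat.print (Mat.shuffle_rows x)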
Input/Output and helper functions
val to_array : mat -> elt array
val of_array : elt array -> int -> int -> mat
val to_arrays : mat -> elt array array
val of_arrays : elt array array -> mat
val to_rows : mat -> mat array
val of_rows : mat array -> mat
val to_cols : mat -> mat array
val of_cols : mat array -> mat
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> mat -> unit
val save : out:string -> mat -> unit
val load : string -> mat
val save_txt : ?sep:string -> ?append:bool -> out:string -> mat -> unit
val load_txt : ?sep:string -> string -> mat
val save_npy : out:string -> mat -> unit
val load_npy : string -> mat
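A brief sketch of the I/O helpers above; the file name is arbitrary:

  let () =
    let open Owl in
    let x = Mat.uniform 2 3 in
    Mat.save_txt ~sep:"," ~out:"x.csv" x;      (* write as comma-separated text *)
    let y = Mat.load_txt ~sep:"," "x.csv" in   (* read it back *)
    Printf.printf "loaded %d x %d\n" (Mat.row_num y) (Mat.col_num y);
    Mat.print y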
Unary mathematical operations
val min : ?axis:int -> ?keep_dims:bool -> mat -> mat
val min' : mat -> elt
val max : ?axis:int -> ?keep_dims:bool -> mat -> mat
val max' : mat -> elt
val minmax : ?axis:int -> ?keep_dims:bool -> mat -> mat * mat
val minmax' : mat -> elt * elt
val min_i : mat -> elt * int array
val max_i : mat -> elt * int array
val minmax_i : mat -> (elt * int array) * (elt * int array)
val trace : mat -> elt
val sum : ?axis:int -> ?keep_dims:bool -> mat -> mat
val sum' : mat -> elt
val prod : ?axis:int -> ?keep_dims:bool -> mat -> mat
val prod' : mat -> elt
val mean : ?axis:int -> ?keep_dims:bool -> mat -> mat
val mean' : mat -> elt
val var' : mat -> elt
val std' : mat -> elt
val sem : ?axis:int -> ?keep_dims:bool -> mat -> mat
val sem' : mat -> elt
val sum_rows : ?keep_dims:bool -> mat -> mat
val sum_cols : ?keep_dims:bool -> mat -> mat
val mean_rows : ?keep_dims:bool -> mat -> mat
val mean_cols : ?keep_dims:bool -> mat -> mat
val abs : mat -> mat
val abs2 : mat -> mat
val conj : mat -> mat
val neg : mat -> mat
val reci : mat -> mat
val reci_tol : ?tol:elt -> mat -> mat
val sqr : mat -> mat
val sqrt : mat -> mat
val cbrt : mat -> mat
val exp : mat -> mat
val exp2 : mat -> mat
val exp10 : mat -> mat
val expm1 : mat -> mat
val log : mat -> mat
val log10 : mat -> mat
val log2 : mat -> mat
val log1p : mat -> mat
val sin : mat -> mat
val cos : mat -> mat
val tan : mat -> mat
val asin : mat -> mat
val acos : mat -> mat
val atan : mat -> mat
val sinh : mat -> mat
val cosh : mat -> mat
val tanh : mat -> mat
val asinh : mat -> mat
val acosh : mat -> mat
val atanh : mat -> mat
val floor : mat -> mat
val ceil : mat -> mat
val round : mat -> mat
val trunc : mat -> mat
val fix : mat -> mat
val modf : mat -> mat * mat
val l1norm : ?axis:int -> ?keep_dims:bool -> mat -> mat
val l1norm' : mat -> elt
val l2norm : ?axis:int -> ?keep_dims:bool -> mat -> mat
val l2norm' : mat -> elt
val l2norm_sqr : ?axis:int -> ?keep_dims:bool -> mat -> mat
val l2norm_sqr' : mat -> elt
val vecnorm : ?axis:int -> ?p:float -> ?keep_dims:bool -> mat -> mat
val vecnorm' : ?p:float -> mat -> elt
val cumsum : ?axis:int -> mat -> mat
val cumprod : ?axis:int -> mat -> mat
val cummin : ?axis:int -> mat -> mat
val cummax : ?axis:int -> mat -> mat
val diff : ?axis:int -> ?n:int -> mat -> mat
val var : ?axis:int -> ?keep_dims:bool -> mat -> mat
val std : ?axis:int -> ?keep_dims:bool -> mat -> mat
val mat2gray : ?amin:elt -> ?amax:elt -> mat -> mat
val lgamma : mat -> mat
val dawsn : mat -> mat
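The reductions follow a common convention: the primed versions (sum', mean', l2norm', ...) fold over the whole matrix and return a scalar, while the unprimed versions reduce along ?axis and return a matrix. A brief sketch:

  let () =
    let open Owl in
    let x = Mat.uniform 3 4 in
    let col_sums = Mat.sum ~axis:0 x in   (* 1 x 4 matrix of column sums *)
    let total = Mat.sum' x in             (* scalar sum of all elements *)
    let fro = Mat.l2norm' x in            (* Frobenius norm *)
    Printf.printf "total = %g, l2 = %g\n" total fro;
    Mat.print col_sums;
    Mat.print (Mat.sqrt x)                (* elementwise square root *)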
Binary mathematical operations
val add : mat -> mat -> mat
val sub : mat -> mat -> mat
val mul : mat -> mat -> mat
val div : mat -> mat -> mat
val add_scalar : mat -> elt -> mat
val sub_scalar : mat -> elt -> mat
val mul_scalar : mat -> elt -> mat
val div_scalar : mat -> elt -> mat
val scalar_add : elt -> mat -> mat
val scalar_sub : elt -> mat -> mat
val scalar_mul : elt -> mat -> mat
val scalar_div : elt -> mat -> mat
val dot : mat -> mat -> mat
val add_diag : mat -> elt -> mat
val pow : mat -> mat -> mat
val scalar_pow : elt -> mat -> mat
val pow_scalar : mat -> elt -> mat
val min2 : mat -> mat -> mat
val max2 : mat -> mat -> mat
val ssqr' : mat -> elt -> elt
val ssqr_diff' : mat -> mat -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> mat -> mat
val cov : ?b:mat -> a:mat -> mat
val kron : mat -> mat -> mat
val fma : mat -> mat -> mat -> mat
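A brief sketch of the binary operations above, including the matrix product dot:

  let () =
    let open Owl in
    let a = Mat.uniform 2 3 in
    let b = Mat.uniform 3 2 in
    let c = Mat.dot a b in                        (* 2 x 2 matrix product *)
    let d = Mat.add c (Mat.mul_scalar c 0.5) in   (* c + 0.5 * c, elementwise *)
    Mat.print c;
    Mat.print d;
    Mat.print (Mat.clip_by_value ~amin:0.2 ~amax:0.8 d)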
Functions of in-place modification
val create_ : out:mat -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:mat -> unit
val bernoulli_ : ?p:float -> out:mat -> unit
val zeros_ : out:mat -> unit
val ones_ : out:mat -> unit
val sort_ : mat -> unit
val one_hot_ : out:mat -> int -> mat -> unit
val copy_ : out:mat -> mat -> unit
val reshape_ : out:mat -> mat -> unit
val transpose_ : out:mat -> ?axis:int array -> mat -> unit
val sum_ : out:mat -> axis:int -> mat -> unit
val min_ : out:mat -> axis:int -> mat -> unit
val max_ : out:mat -> axis:int -> mat -> unit
val add_ : ?out:mat -> mat -> mat -> unit
val sub_ : ?out:mat -> mat -> mat -> unit
val mul_ : ?out:mat -> mat -> mat -> unit
val div_ : ?out:mat -> mat -> mat -> unit
val pow_ : ?out:mat -> mat -> mat -> unit
val atan2_ : ?out:mat -> mat -> mat -> unit
val hypot_ : ?out:mat -> mat -> mat -> unit
val fmod_ : ?out:mat -> mat -> mat -> unit
val min2_ : ?out:mat -> mat -> mat -> unit
val max2_ : ?out:mat -> mat -> mat -> unit
val add_scalar_ : ?out:mat -> mat -> elt -> unit
val sub_scalar_ : ?out:mat -> mat -> elt -> unit
val mul_scalar_ : ?out:mat -> mat -> elt -> unit
val div_scalar_ : ?out:mat -> mat -> elt -> unit
val pow_scalar_ : ?out:mat -> mat -> elt -> unit
val atan2_scalar_ : ?out:mat -> mat -> elt -> unit
val fmod_scalar_ : ?out:mat -> mat -> elt -> unit
val scalar_add_ : ?out:mat -> elt -> mat -> unit
val scalar_sub_ : ?out:mat -> elt -> mat -> unit
val scalar_mul_ : ?out:mat -> elt -> mat -> unit
val scalar_div_ : ?out:mat -> elt -> mat -> unit
val scalar_pow_ : ?out:mat -> elt -> mat -> unit
val scalar_atan2_ : ?out:mat -> elt -> mat -> unit
val scalar_fmod_ : ?out:mat -> elt -> mat -> unit
val fma_ : ?out:mat -> mat -> mat -> mat -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:mat -> mat -> mat -> unit
val conj_ : ?out:mat -> mat -> unit
val abs_ : ?out:mat -> mat -> unit
val neg_ : ?out:mat -> mat -> unit
val reci_ : ?out:mat -> mat -> unit
val signum_ : ?out:mat -> mat -> unit
val sqr_ : ?out:mat -> mat -> unit
val sqrt_ : ?out:mat -> mat -> unit
val cbrt_ : ?out:mat -> mat -> unit
val exp_ : ?out:mat -> mat -> unit
val exp2_ : ?out:mat -> mat -> unit
val exp10_ : ?out:mat -> mat -> unit
val expm1_ : ?out:mat -> mat -> unit
val log_ : ?out:mat -> mat -> unit
val log2_ : ?out:mat -> mat -> unit
val log10_ : ?out:mat -> mat -> unit
val log1p_ : ?out:mat -> mat -> unit
val sin_ : ?out:mat -> mat -> unit
val cos_ : ?out:mat -> mat -> unit
val tan_ : ?out:mat -> mat -> unit
val asin_ : ?out:mat -> mat -> unit
val acos_ : ?out:mat -> mat -> unit
val atan_ : ?out:mat -> mat -> unit
val sinh_ : ?out:mat -> mat -> unit
val cosh_ : ?out:mat -> mat -> unit
val tanh_ : ?out:mat -> mat -> unit
val asinh_ : ?out:mat -> mat -> unit
val acosh_ : ?out:mat -> mat -> unit
val atanh_ : ?out:mat -> mat -> unit
val floor_ : ?out:mat -> mat -> unit
val ceil_ : ?out:mat -> mat -> unit
val round_ : ?out:mat -> mat -> unit
val trunc_ : ?out:mat -> mat -> unit
val fix_ : ?out:mat -> mat -> unit
val erf_ : ?out:mat -> mat -> unit
val erfc_ : ?out:mat -> mat -> unit
val relu_ : ?out:mat -> mat -> unit
val softplus_ : ?out:mat -> mat -> unit
val softsign_ : ?out:mat -> mat -> unit
val sigmoid_ : ?out:mat -> mat -> unit
val softmax_ : ?out:mat -> ?axis:int -> mat -> unit
val cumsum_ : ?out:mat -> ?axis:int -> mat -> unit
val cumprod_ : ?out:mat -> ?axis:int -> mat -> unit
val cummin_ : ?out:mat -> ?axis:int -> mat -> unit
val cummax_ : ?out:mat -> ?axis:int -> mat -> unit
val dropout_ : ?out:mat -> ?rate:float -> mat -> unit
val elt_equal_ : ?out:mat -> mat -> mat -> unit
val elt_not_equal_ : ?out:mat -> mat -> mat -> unit
val elt_less_ : ?out:mat -> mat -> mat -> unit
val elt_greater_ : ?out:mat -> mat -> mat -> unit
val elt_less_equal_ : ?out:mat -> mat -> mat -> unit
val elt_greater_equal_ : ?out:mat -> mat -> mat -> unit
val elt_equal_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_not_equal_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_less_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_greater_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_less_equal_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_greater_equal_scalar_ : ?out:mat -> mat -> elt -> unit
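The trailing-underscore functions modify an existing matrix instead of allocating a new one; when the optional ~out is omitted the first matrix argument is typically overwritten. A brief sketch under that assumption:

  let () =
    let open Owl in
    let x = Mat.uniform 3 3 in
    let y = Mat.ones 3 3 in
    Mat.add_ x y;              (* x <- x + y *)
    Mat.sqrt_ x;               (* x <- sqrt x, elementwise *)
    let out = Mat.zeros 3 3 in
    Mat.mul_ ~out x y;         (* out <- x * y, x left untouched *)
    Mat.print x;
    Mat.print out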
include Owl_dense_matrix_intf.Real with type elt := elt and type mat := mat
Specific real functions
val i0 : mat -> mat
val i0e : mat -> mat
val i1 : mat -> mat
val i1e : mat -> mat
val iv : v:mat -> mat -> mat
val scalar_iv : v:elt -> mat -> mat
val iv_scalar : v:mat -> elt -> mat
val j0 : mat -> mat
val j1 : mat -> mat
val jv : v:mat -> mat -> mat
val scalar_jv : v:elt -> mat -> mat
val jv_scalar : v:mat -> elt -> mat
val semidef : int -> mat
val min_rows : mat -> (elt * int * int) array
val min_cols : mat -> (elt * int * int) array
val max_rows : mat -> (elt * int * int) array
val max_cols : mat -> (elt * int * int) array
val signum : mat -> mat
val erf : mat -> mat
val erfc : mat -> mat
val logistic : mat -> mat
val relu : mat -> mat
val elu : ?alpha:elt -> mat -> mat
val leaky_relu : ?alpha:elt -> mat -> mat
val softplus : mat -> mat
val softsign : mat -> mat
val softmax : ?axis:int -> mat -> mat
val sigmoid : mat -> mat
val log_sum_exp' : mat -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> mat -> mat
val max_pool : ?padding:Owl_types.padding -> mat -> int array -> int array -> mat
val avg_pool : ?padding:Owl_types.padding -> mat -> int array -> int array -> mat
val atan2 : mat -> mat -> mat
val scalar_atan2 : elt -> mat -> mat
val atan2_scalar : mat -> elt -> mat
val hypot : mat -> mat -> mat
val fmod : mat -> mat -> mat
val fmod_scalar : mat -> elt -> mat
val scalar_fmod : elt -> mat -> mat
val cross_entropy' : mat -> mat -> elt
val clip_by_l2norm : elt -> mat -> mat
val poisson : mu:elt -> int -> int -> mat
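A brief sketch of the real-valued activation functions above:

  let () =
    let open Owl in
    let x = Mat.gaussian 2 5 in
    let a = Mat.relu x in              (* max(0, x), elementwise *)
    let s = Mat.sigmoid x in           (* 1 / (1 + exp (-x)), elementwise *)
    let p = Mat.softmax ~axis:1 x in   (* each row normalised into a probability vector *)
    Mat.print a; Mat.print s; Mat.print p;
    Mat.print (Mat.sum ~axis:1 p)      (* row sums of the softmax, all close to 1 *)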
include module type of struct include Owl_dense_matrix.Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (-$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (*$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (/$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val ($+) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($-) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($*) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($/) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (!=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<>) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (>) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (>=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
include sig ... end
val (=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (!=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<>$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (>$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (>=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (!=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<>.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (>.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (>=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (!=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<>.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (>.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (>=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (=~) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (=~$) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (=~.) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=~.$) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (%) : (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val (%$) : (float, 'a) Owl_dense_matrix_generic.t -> float -> (float, 'a) Owl_dense_matrix_generic.t
val (**) : (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val ($**) : float -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val (**$) : (float, 'a) Owl_dense_matrix_generic.t -> float -> (float, 'a) Owl_dense_matrix_generic.t
val (+=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (-=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (*=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (/=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (+$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (-$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (*$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (/$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (@=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (@||) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (.!{;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_matrix_generic.t
val (.!{;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (.${}) : ('a, 'b) Owl_dense_matrix_generic.t -> int list -> ('a, 'b) Owl_dense_matrix_generic.t
val (.${;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> int list array -> ('a, 'b) Owl_dense_matrix_generic.t
val (.${}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int list -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (.${;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int list array -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
include sig ... end
val (*@) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (.%{}) : ('a, 'b) Owl_dense_matrix_generic.t -> (int * int) -> 'a
val (.%{;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> (int * int) -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int array -> 'a -> unit
include sig ... end
val (**@) : ('a, 'b) Owl_linalg_generic.t -> float -> ('a, 'b) Owl_linalg_generic.t
val (/@) : ('a, 'b) Owl_linalg_generic.t -> ('a, 'b) Owl_linalg_generic.t -> ('a, 'b) Owl_linalg_generic.t
val mpow : Owl_linalg_d.mat -> float -> Owl_linalg_d.mat
val diag : ?k:int -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
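A brief sketch of the operator syntax above, with Mat.(...) bringing the operators into scope:

  let () =
    let open Owl in
    let a = Mat.uniform 3 3 in
    let b = Mat.eye 3 in
    let c = Mat.(a *@ b) in      (* matrix product *)
    let d = Mat.(a @= b) in      (* stack vertically into a 6 x 3 matrix *)
    let p = Mat.mpow a 2. in     (* matrix power a^2 *)
    let v = Mat.(a.%{1, 2}) in   (* element at row 1, column 2 *)
    Printf.printf "a.(1,2) = %g\n" v;
    Mat.print c; Mat.print d; Mat.print p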
\ No newline at end of file
diff --git a/owl/Owl_algodiff/.dummy b/owl/Owl_algodiff/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_algodiff/D/A/Linalg/index.html b/owl/Owl_algodiff/D/A/Linalg/index.html
deleted file mode 100644
index 39845981f..000000000
--- a/owl/Owl_algodiff/D/A/Linalg/index.html
+++ /dev/null
@@ -1,6 +0,0 @@
Linalg (owl.Owl_algodiff.D.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file
diff --git a/owl/Owl_algodiff/D/A/Mat/index.html b/owl/Owl_algodiff/D/A/Mat/index.html
deleted file mode 100644
index 6f8d4eddd..000000000
--- a/owl/Owl_algodiff/D/A/Mat/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Mat (owl.Owl_algodiff.D.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file
diff --git a/owl/Owl_algodiff/D/A/Scalar/index.html b/owl/Owl_algodiff/D/A/Scalar/index.html
deleted file mode 100644
index c600b538e..000000000
--- a/owl/Owl_algodiff/D/A/Scalar/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Scalar (owl.Owl_algodiff.D.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file
diff --git a/owl/Owl_algodiff/D/A/index.html b/owl/Owl_algodiff/D/A/index.html
deleted file mode 100644
index b1e60d404..000000000
--- a/owl/Owl_algodiff/D/A/index.html
+++ /dev/null
@@ -1,158 +0,0 @@
A (owl.Owl_algodiff.D.A)

Module D.A

val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file
diff --git a/owl/Owl_algodiff/D/Arr/index.html b/owl/Owl_algodiff/D/Arr/index.html
deleted file mode 100644
index 853bd0ac9..000000000
--- a/owl/Owl_algodiff/D/Arr/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Arr (owl.Owl_algodiff.D.Arr)

Module D.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file
diff --git a/owl/Owl_algodiff/D/Builder/index.html b/owl/Owl_algodiff/D/Builder/index.html
deleted file mode 100644
index 4cee77e3e..000000000
--- a/owl/Owl_algodiff/D/Builder/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Builder (owl.Owl_algodiff.D.Builder)

Module D.Builder

module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t
module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t
module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t
module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array
module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t
module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t
\ No newline at end of file
diff --git a/owl/Owl_algodiff/D/Builder/module-type-Aiso/index.html b/owl/Owl_algodiff/D/Builder/module-type-Aiso/index.html
deleted file mode 100644
index 561fbe61f..000000000
--- a/owl/Owl_algodiff/D/Builder/module-type-Aiso/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Aiso (owl.Owl_algodiff.D.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file
diff --git a/owl/Owl_algodiff/D/Builder/module-type-Piso/index.html b/owl/Owl_algodiff/D/Builder/module-type-Piso/index.html
deleted file mode 100644
index 774a89a4a..000000000
--- a/owl/Owl_algodiff/D/Builder/module-type-Piso/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Piso (owl.Owl_algodiff.D.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file
diff --git a/owl/Owl_algodiff/D/Builder/module-type-Siao/index.html b/owl/Owl_algodiff/D/Builder/module-type-Siao/index.html
deleted file mode 100644
index cd92b5520..000000000
--- a/owl/Owl_algodiff/D/Builder/module-type-Siao/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Siao (owl.Owl_algodiff.D.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file
diff --git a/owl/Owl_algodiff/D/Builder/module-type-Sipo/index.html b/owl/Owl_algodiff/D/Builder/module-type-Sipo/index.html
deleted file mode 100644
index 85c4d44d2..000000000
--- a/owl/Owl_algodiff/D/Builder/module-type-Sipo/index.html
+++ /dev/null
@@ -1,7 +0,0 @@
Sipo (owl.Owl_algodiff.D.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file
diff --git a/owl/Owl_algodiff/D/Builder/module-type-Siso/index.html b/owl/Owl_algodiff/D/Builder/module-type-Siso/index.html
deleted file mode 100644
index f7140b088..000000000
--- a/owl/Owl_algodiff/D/Builder/module-type-Siso/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Siso (owl.Owl_algodiff.D.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file
diff --git a/owl/Owl_algodiff/D/Builder/module-type-Sito/index.html b/owl/Owl_algodiff/D/Builder/module-type-Sito/index.html
deleted file mode 100644
index 7643b893a..000000000
--- a/owl/Owl_algodiff/D/Builder/module-type-Sito/index.html
+++ /dev/null
@@ -1,7 +0,0 @@
Sito (owl.Owl_algodiff.D.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file
diff --git a/owl/Owl_algodiff/D/Linalg/index.html b/owl/Owl_algodiff/D/Linalg/index.html
deleted file mode 100644
index f58ee16aa..000000000
--- a/owl/Owl_algodiff/D/Linalg/index.html
+++ /dev/null
@@ -1,6 +0,0 @@
Linalg (owl.Owl_algodiff.D.Linalg)

Module D.Linalg

val inv : t -> t
val logdet : t -> t
val chol : ?upper:bool -> t -> t
val qr : t -> t * t
val lq : t -> t * t
val svd : ?thin:bool -> t -> t * t * t
val sylvester : t -> t -> t -> t
val lyapunov : t -> t -> t
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> t -> t -> t
val (/@) : t -> t -> t
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> t -> t -> t
val care : ?diag_r:bool -> t -> t -> t -> t -> t
val dare : ?diag_r:bool -> t -> t -> t -> t -> t
\ No newline at end of file
diff --git a/owl/Owl_algodiff/D/Mat/index.html b/owl/Owl_algodiff/D/Mat/index.html
deleted file mode 100644
index 85da2073f..000000000
--- a/owl/Owl_algodiff/D/Mat/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Mat (owl.Owl_algodiff.D.Mat)

Module D.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file
diff --git a/owl/Owl_algodiff/D/Maths/index.html b/owl/Owl_algodiff/D/Maths/index.html
deleted file mode 100644
index 0e4e70b00..000000000
--- a/owl/Owl_algodiff/D/Maths/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Maths (owl.Owl_algodiff.D.Maths)

Module D.Maths

val (+) : t -> t -> t
val (-) : t -> t -> t
val (*) : t -> t -> t
val (/) : t -> t -> t
val (*@) : t -> t -> t
val (**) : t -> t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val kron : t -> t -> t
val dot : t -> t -> t
val pow : t -> t -> t
val atan2 : t -> t -> t
val min2 : t -> t -> t
val max2 : t -> t -> t
val cross_entropy : t -> t -> t
val inv : t -> t
val neg : t -> t
val abs : t -> t
val signum : t -> t
val floor : t -> t
val ceil : t -> t
val round : t -> t
val sqr : t -> t
val sqrt : t -> t
val log : t -> t
val log2 : t -> t
val log10 : t -> t
val exp : t -> t
val sin : t -> t
val cos : t -> t
val tan : t -> t
val sinh : t -> t
val cosh : t -> t
val tanh : t -> t
val asin : t -> t
val acos : t -> t
val atan : t -> t
val asinh : t -> t
val acosh : t -> t
val atanh : t -> t
val sum' : t -> t
val log_sum_exp' : t -> t
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t
val sum : ?axis:int -> ?keep_dims:bool -> t -> t
val sum_reduce : ?axis:int array -> t -> t
val mean : t -> t
val transpose : ?axis:int array -> t -> t
val swap : int -> int -> t -> t
val l1norm' : t -> t
val l2norm' : t -> t
val l2norm_sqr' : t -> t
val sigmoid : t -> t
val relu : t -> t
val dawsn : t -> t
val softplus : t -> t
val softsign : t -> t
val softmax : ?axis:int -> t -> t
val reshape : t -> int array -> t
val flatten : t -> t
val get_item : t -> int -> int -> t
val get_row : t -> int -> t
val concat : axis:int -> t -> t -> t
val split : axis:int -> int array -> t -> t array
val of_arrays : t array array -> t
val to_arrays : t -> t array array
val concatenate : axis:int -> t array -> t
val stack : axis:int -> t array -> t
val get_slice : int list list -> t -> t
val set_slice : int list list -> t -> t -> t
val get_fancy : Owl_types.index list -> t -> t
val set_fancy : Owl_types.index list -> t -> t -> t
val diag : ?k:int -> t -> t
val diagm : ?k:int -> t -> t
val trace : t -> t
val triu : ?k:int -> t -> t
val tril : ?k:int -> t -> t
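Because each Maths function is defined on the AD type t for both scalars and arrays, a composite written with them can be differentiated directly. A minimal sketch, assuming the `owl` package (f and the evaluation point are made up for illustration):

module AD = Owl_algodiff.D

(* f(x) = tanh(x^2 + log(x + 1)), a scalar-to-scalar composite *)
let f x = AD.Maths.(tanh (sqr x + log (x + AD.pack_flt 1.)))

let () =
  let d = AD.diff f (AD.pack_flt 2.) in
  Printf.printf "f'(2.0) = %g\n" (AD.unpack_flt d)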
\ No newline at end of file diff --git a/owl/Owl_algodiff/D/NN/index.html b/owl/Owl_algodiff/D/NN/index.html deleted file mode 100644 index 9f6fdaae4..000000000 --- a/owl/Owl_algodiff/D/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl.Owl_algodiff.D.NN)

Module D.NN

val dropout : ?rate:float -> t -> t
val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val upsampling2d : t -> int array -> t
val pad : ?v:A.elt -> int list list -> t -> t
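These wrappers mirror the primal Ndarray operations: conv2d expects an NHWC input and an [|h; w; in_channels; out_channels|] kernel, followed by the stride. A hedged sketch, assuming the `owl` package (the shapes and random inputs are illustrative only):

module AD = Owl_algodiff.D

let () =
  let x = AD.pack_arr (Owl.Arr.gaussian [| 1; 28; 28; 1 |]) in   (* one 28x28 image *)
  let k = AD.pack_arr (Owl.Arr.gaussian [| 3; 3; 1; 8 |]) in     (* eight 3x3 filters *)
  let y = AD.NN.conv2d ~padding:Owl_types.SAME x k [| 1; 1 |] in
  let z = AD.NN.max_pool2d Owl_types.VALID y [| 2; 2 |] [| 2; 2 |] in
  Array.iter (Printf.printf "%i ") (AD.shape z);
  print_newline ()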
\ No newline at end of file diff --git a/owl/Owl_algodiff/D/index.html b/owl/Owl_algodiff/D/index.html deleted file mode 100644 index b98802275..000000000 --- a/owl/Owl_algodiff/D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -D (owl.Owl_algodiff.D)

Module Owl_algodiff.D

module A : sig ... end
type t = Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D).t =
  | F of A.elt
  | Arr of A.arr
  | DF of t * t * int
  | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
val tag : unit -> int
val primal : t -> t
val primal' : t -> t
val zero : t -> t
val reset_zero : t -> t
val tangent : t -> t
val adjref : t -> t Stdlib.ref
val adjval : t -> t
val shape : t -> int array
val is_float : t -> bool
val is_arr : t -> bool
val row_num : t -> int
val col_num : t -> int
val numel : t -> int
val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t
val clip_by_l2norm : A.elt -> t -> t
val copy_primal' : t -> t
val tile : t -> int array -> t
val repeat : t -> int array -> t
val pack_elt : A.elt -> t
val unpack_elt : t -> A.elt
val pack_flt : float -> t
val _f : float -> t
val unpack_flt : t -> float
val pack_arr : A.arr -> t
val unpack_arr : t -> A.arr
val deep_info : t -> string
val type_info : t -> string
val error_binop : string -> t -> t -> 'a
val error_uniop : string -> t -> 'a
val make_forward : t -> t -> int -> t
val make_reverse : t -> int -> t
val reverse_prop : t -> t -> unit
val diff : (t -> t) -> t -> t
val diff' : (t -> t) -> t -> t * t
val grad : (t -> t) -> t -> t
val grad' : (t -> t) -> t -> t * t
val jacobian : (t -> t) -> t -> t
val jacobian' : (t -> t) -> t -> t * t
val jacobianv : (t -> t) -> t -> t -> t
val jacobianv' : (t -> t) -> t -> t -> t * t
val jacobianTv : (t -> t) -> t -> t -> t
val jacobianTv' : (t -> t) -> t -> t -> t * t
val hessian : (t -> t) -> t -> t
val hessian' : (t -> t) -> t -> t * t
val hessianv : (t -> t) -> t -> t -> t
val hessianv' : (t -> t) -> t -> t -> t * t
val laplacian : (t -> t) -> t -> t
val laplacian' : (t -> t) -> t -> t * t
val gradhessian : (t -> t) -> t -> t * t
val gradhessian' : (t -> t) -> t -> t * t * t
val gradhessianv : (t -> t) -> t -> t -> t * t
val gradhessianv' : (t -> t) -> t -> t -> t * t * t
module Builder : sig ... end
module Maths : sig ... end
module Linalg : sig ... end
module NN : sig ... end
module Mat : sig ... end
module Arr : sig ... end
val to_trace : t list -> string
val to_dot : t list -> string
val pp_num : Stdlib.Format.formatter -> t -> unit
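The differentiation operators above all take an ordinary OCaml closure over the AD type. A small sketch of reverse-mode use, assuming the `owl` package (the objective f is made up for illustration):

module AD = Owl_algodiff.D

(* f maps a 1 x n row vector to a scalar *)
let f x = AD.Maths.(sum' (x * sin x))

let () =
  let x = AD.Mat.gaussian 1 5 in
  let g = AD.grad f x in          (* gradient has the same shape as x *)
  AD.Mat.print (AD.primal' g)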
\ No newline at end of file diff --git a/owl/Owl_algodiff/S/A/Linalg/index.html b/owl/Owl_algodiff/S/A/Linalg/index.html deleted file mode 100644 index b11b63b81..000000000 --- a/owl/Owl_algodiff/S/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_algodiff.S.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl/Owl_algodiff/S/A/Mat/index.html b/owl/Owl_algodiff/S/A/Mat/index.html deleted file mode 100644 index ec80d492d..000000000 --- a/owl/Owl_algodiff/S/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_algodiff.S.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl/Owl_algodiff/S/A/Scalar/index.html b/owl/Owl_algodiff/S/A/Scalar/index.html deleted file mode 100644 index 1ccc7f0ae..000000000 --- a/owl/Owl_algodiff/S/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl.Owl_algodiff.S.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl/Owl_algodiff/S/A/index.html b/owl/Owl_algodiff/S/A/index.html deleted file mode 100644 index 31b3c7bbe..000000000 --- a/owl/Owl_algodiff/S/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl.Owl_algodiff.S.A)

Module S.A

val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
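A is the primal, single-precision array module the AD layer is built on; values cross between the two layers with pack_arr / unpack_arr (and pack_elt / unpack_elt for scalars). A brief sketch, assuming the `owl` package (the shape and values are illustrative):

module AD = Owl_algodiff.S

let () =
  let raw = AD.A.uniform [| 2; 3 |] in          (* plain primal ndarray *)
  let x = AD.pack_arr raw in                    (* lifted into the AD type t *)
  let y = AD.Maths.(sum' (sqr x)) in
  Printf.printf "sum of squares = %g\n" (AD.unpack_flt y)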
\ No newline at end of file diff --git a/owl/Owl_algodiff/S/Arr/index.html b/owl/Owl_algodiff/S/Arr/index.html deleted file mode 100644 index ba3fe4620..000000000 --- a/owl/Owl_algodiff/S/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl.Owl_algodiff.S.Arr)

Module S.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl/Owl_algodiff/S/Builder/index.html b/owl/Owl_algodiff/S/Builder/index.html deleted file mode 100644 index 5589d8e9c..000000000 --- a/owl/Owl_algodiff/S/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl.Owl_algodiff.S.Builder)

Module S.Builder

module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t
module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t
module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t
module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array
module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t
module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t
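Builder is how new differentiable primitives are registered: implement one of the module types (Siso for single input, single output, and so on) and pass it to the matching build_* function. A hedged sketch of a hypothetical cube operator, assuming the `owl` package and following the conventions of Owl's own unary operators (the derivative in both modes is the usual 3 * x^2):

module AD = Owl_algodiff.S

let cube =
  AD.Builder.build_siso
    (module struct
      let label = "cube"
      let ff_f a = AD.F AD.A.Scalar.(mul a (mul a a))
      let ff_arr a = AD.Arr AD.A.(mul a (mul a a))
      (* forward mode: output tangent = 3 * x^2 * input tangent *)
      let df _cp ap at = AD.Maths.(at * AD.pack_flt 3. * ap * ap)
      (* reverse mode: input adjoint contribution = 3 * x^2 * output adjoint *)
      let dr a _cp ca = AD.Maths.(!ca * AD.pack_flt 3. * AD.primal a * AD.primal a)
    end : AD.Builder.Siso)

let () =
  Printf.printf "%g\n" AD.(unpack_flt (diff cube (pack_flt 2.)))  (* expect 12 *)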
\ No newline at end of file diff --git a/owl/Owl_algodiff/S/Builder/module-type-Aiso/index.html b/owl/Owl_algodiff/S/Builder/module-type-Aiso/index.html deleted file mode 100644 index 19935b61b..000000000 --- a/owl/Owl_algodiff/S/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl.Owl_algodiff.S.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl/Owl_algodiff/S/Builder/module-type-Piso/index.html b/owl/Owl_algodiff/S/Builder/module-type-Piso/index.html deleted file mode 100644 index 18d3e5c2c..000000000 --- a/owl/Owl_algodiff/S/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl.Owl_algodiff.S.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_algodiff/S/Builder/module-type-Siao/index.html b/owl/Owl_algodiff/S/Builder/module-type-Siao/index.html deleted file mode 100644 index 93e862fb7..000000000 --- a/owl/Owl_algodiff/S/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl.Owl_algodiff.S.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl/Owl_algodiff/S/Builder/module-type-Sipo/index.html b/owl/Owl_algodiff/S/Builder/module-type-Sipo/index.html deleted file mode 100644 index 5265b1a62..000000000 --- a/owl/Owl_algodiff/S/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl.Owl_algodiff.S.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl/Owl_algodiff/S/Builder/module-type-Siso/index.html b/owl/Owl_algodiff/S/Builder/module-type-Siso/index.html deleted file mode 100644 index 65e6f1ef7..000000000 --- a/owl/Owl_algodiff/S/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl.Owl_algodiff.S.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_algodiff/S/Builder/module-type-Sito/index.html b/owl/Owl_algodiff/S/Builder/module-type-Sito/index.html deleted file mode 100644 index 39a4e9215..000000000 --- a/owl/Owl_algodiff/S/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl.Owl_algodiff.S.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl/Owl_algodiff/S/Linalg/index.html b/owl/Owl_algodiff/S/Linalg/index.html deleted file mode 100644 index 2aa84ffd8..000000000 --- a/owl/Owl_algodiff/S/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_algodiff.S.Linalg)

Module S.Linalg

val inv : t -> t
val logdet : t -> t
val chol : ?upper:bool -> t -> t
val qr : t -> t * t
val lq : t -> t * t
val svd : ?thin:bool -> t -> t * t * t
val sylvester : t -> t -> t -> t
val lyapunov : t -> t -> t
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> t -> t -> t
val (/@) : t -> t -> t
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> t -> t -> t
val care : ?diag_r:bool -> t -> t -> t -> t -> t
val dare : ?diag_r:bool -> t -> t -> t -> t -> t
\ No newline at end of file diff --git a/owl/Owl_algodiff/S/Mat/index.html b/owl/Owl_algodiff/S/Mat/index.html deleted file mode 100644 index e64ce3fbd..000000000 --- a/owl/Owl_algodiff/S/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_algodiff.S.Mat)

Module S.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl/Owl_algodiff/S/Maths/index.html b/owl/Owl_algodiff/S/Maths/index.html deleted file mode 100644 index 66281ea86..000000000 --- a/owl/Owl_algodiff/S/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl.Owl_algodiff.S.Maths)

Module S.Maths

val (+) : t -> t -> t
val (-) : t -> t -> t
val (*) : t -> t -> t
val (/) : t -> t -> t
val (*@) : t -> t -> t
val (**) : t -> t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val kron : t -> t -> t
val dot : t -> t -> t
val pow : t -> t -> t
val atan2 : t -> t -> t
val min2 : t -> t -> t
val max2 : t -> t -> t
val cross_entropy : t -> t -> t
val inv : t -> t
val neg : t -> t
val abs : t -> t
val signum : t -> t
val floor : t -> t
val ceil : t -> t
val round : t -> t
val sqr : t -> t
val sqrt : t -> t
val log : t -> t
val log2 : t -> t
val log10 : t -> t
val exp : t -> t
val sin : t -> t
val cos : t -> t
val tan : t -> t
val sinh : t -> t
val cosh : t -> t
val tanh : t -> t
val asin : t -> t
val acos : t -> t
val atan : t -> t
val asinh : t -> t
val acosh : t -> t
val atanh : t -> t
val sum' : t -> t
val log_sum_exp' : t -> t
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t
val sum : ?axis:int -> ?keep_dims:bool -> t -> t
val sum_reduce : ?axis:int array -> t -> t
val mean : t -> t
val transpose : ?axis:int array -> t -> t
val swap : int -> int -> t -> t
val l1norm' : t -> t
val l2norm' : t -> t
val l2norm_sqr' : t -> t
val sigmoid : t -> t
val relu : t -> t
val dawsn : t -> t
val softplus : t -> t
val softsign : t -> t
val softmax : ?axis:int -> t -> t
val reshape : t -> int array -> t
val flatten : t -> t
val get_item : t -> int -> int -> t
val get_row : t -> int -> t
val concat : axis:int -> t -> t -> t
val split : axis:int -> int array -> t -> t array
val of_arrays : t array array -> t
val to_arrays : t -> t array array
val concatenate : axis:int -> t array -> t
val stack : axis:int -> t array -> t
val get_slice : int list list -> t -> t
val set_slice : int list list -> t -> t -> t
val get_fancy : Owl_types.index list -> t -> t
val set_fancy : Owl_types.index list -> t -> t -> t
val diag : ?k:int -> t -> t
val diagm : ?k:int -> t -> t
val trace : t -> t
val triu : ?k:int -> t -> t
val tril : ?k:int -> t -> t
\ No newline at end of file diff --git a/owl/Owl_algodiff/S/NN/index.html b/owl/Owl_algodiff/S/NN/index.html deleted file mode 100644 index c5616e75a..000000000 --- a/owl/Owl_algodiff/S/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl.Owl_algodiff.S.NN)

Module S.NN

val dropout : ?rate:float -> t -> t
val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val upsampling2d : t -> int array -> t
val pad : ?v:A.elt -> int list list -> t -> t
\ No newline at end of file diff --git a/owl/Owl_algodiff/S/index.html b/owl/Owl_algodiff/S/index.html deleted file mode 100644 index 80adbbaa3..000000000 --- a/owl/Owl_algodiff/S/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -S (owl.Owl_algodiff.S)

Module Owl_algodiff.S

module A : sig ... end
type t = Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S).t =
  | F of A.elt
  | Arr of A.arr
  | DF of t * t * int
  | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
val tag : unit -> int
val primal : t -> t
val primal' : t -> t
val zero : t -> t
val reset_zero : t -> t
val tangent : t -> t
val adjref : t -> t Stdlib.ref
val adjval : t -> t
val shape : t -> int array
val is_float : t -> bool
val is_arr : t -> bool
val row_num : t -> int
val col_num : t -> int
val numel : t -> int
val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t
val clip_by_l2norm : A.elt -> t -> t
val copy_primal' : t -> t
val tile : t -> int array -> t
val repeat : t -> int array -> t
val pack_elt : A.elt -> t
val unpack_elt : t -> A.elt
val pack_flt : float -> t
val _f : float -> t
val unpack_flt : t -> float
val pack_arr : A.arr -> t
val unpack_arr : t -> A.arr
val deep_info : t -> string
val type_info : t -> string
val error_binop : string -> t -> t -> 'a
val error_uniop : string -> t -> 'a
val make_forward : t -> t -> int -> t
val make_reverse : t -> int -> t
val reverse_prop : t -> t -> unit
val diff : (t -> t) -> t -> t
val diff' : (t -> t) -> t -> t * t
val grad : (t -> t) -> t -> t
val grad' : (t -> t) -> t -> t * t
val jacobian : (t -> t) -> t -> t
val jacobian' : (t -> t) -> t -> t * t
val jacobianv : (t -> t) -> t -> t -> t
val jacobianv' : (t -> t) -> t -> t -> t * t
val jacobianTv : (t -> t) -> t -> t -> t
val jacobianTv' : (t -> t) -> t -> t -> t * t
val hessian : (t -> t) -> t -> t
val hessian' : (t -> t) -> t -> t * t
val hessianv : (t -> t) -> t -> t -> t
val hessianv' : (t -> t) -> t -> t -> t * t
val laplacian : (t -> t) -> t -> t
val laplacian' : (t -> t) -> t -> t * t
val gradhessian : (t -> t) -> t -> t * t
val gradhessian' : (t -> t) -> t -> t * t * t
val gradhessianv : (t -> t) -> t -> t -> t * t
val gradhessianv' : (t -> t) -> t -> t -> t * t * t
module Builder : sig ... end
module Maths : sig ... end
module Linalg : sig ... end
module NN : sig ... end
module Mat : sig ... end
module Arr : sig ... end
val to_trace : t list -> string
val to_dot : t list -> string
val pp_num : Stdlib.Format.formatter -> t -> unit
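grad and its relatives are built on the lower-level machinery listed above, which can also be driven by hand. A sketch of one explicit reverse pass, assuming the `owl` package (input shape and seed are illustrative):

module AD = Owl_algodiff.S

let () =
  let x = AD.make_reverse (AD.Mat.gaussian 1 3) (AD.tag ()) in
  let y = AD.Maths.(l2norm_sqr' x) in
  AD.reverse_prop (AD.pack_flt 1.) y;   (* seed the output adjoint with 1 *)
  AD.Mat.print (AD.adjval x)            (* adjoint of x, i.e. 2 * x *)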
\ No newline at end of file diff --git a/owl/Owl_algodiff_primal_ops/.dummy b/owl/Owl_algodiff_primal_ops/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_algodiff_primal_ops/D/Linalg/index.html b/owl/Owl_algodiff_primal_ops/D/Linalg/index.html deleted file mode 100644 index 93a1b1ae0..000000000 --- a/owl/Owl_algodiff_primal_ops/D/Linalg/index.html +++ /dev/null @@ -1,25 +0,0 @@ - -Linalg (owl.Owl_algodiff_primal_ops.D.Linalg)

Module D.Linalg

include module type of struct include Owl_linalg.D end
include module type of struct include Owl_linalg_d end
type elt = float
type complex_mat = Owl_dense_matrix_z.mat
type int32_mat = (int32, Stdlib.Bigarray.int32_elt) Owl_dense_matrix_generic.t
include Owl_linalg_intf.Common with type elt := elt and type mat := mat and type complex_mat := complex_mat and type int32_mat := int32_mat
include Owl_base_linalg_intf.Common with type elt := elt with type mat := mat with type complex_mat := complex_mat with type int32_mat := int32_mat
Basic functions
val inv : mat -> mat
val det : mat -> elt
val logdet : mat -> elt
val is_triu : mat -> bool
val is_tril : mat -> bool
val is_symmetric : mat -> bool
val is_diag : mat -> bool
Factorisation
val svd : ?thin:bool -> mat -> mat * mat * mat
val chol : ?upper:bool -> mat -> mat
Linear system of equations
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> mat -> mat -> mat
val sylvester : mat -> mat -> mat -> mat
val lyapunov : mat -> mat -> mat
val discrete_lyapunov : ?solver:[ `default | `direct | `bilinear ] -> mat -> mat -> mat
Basic functions
val pinv : ?tol:float -> mat -> mat
val rank : ?tol:float -> mat -> int
val norm : ?p:float -> mat -> float
val vecnorm : ?p:float -> mat -> float
val cond : ?p:float -> mat -> float
val rcond : mat -> float
val is_posdef : mat -> bool
Factorisation
val lu : mat -> mat * mat * int32_mat
val svdvals : mat -> mat
val gsvd : mat -> mat -> mat * mat * mat * mat * mat * mat
val gsvdvals : mat -> mat -> mat
val schur : mat -> mat * mat * complex_mat
val schur_tz : mat -> mat * mat
val ordschur : select:int32_mat -> mat -> mat -> mat * mat * complex_mat
val qz : mat -> mat -> mat * mat * mat * mat * complex_mat
val ordqz : select:int32_mat -> mat -> mat -> mat -> mat -> mat * mat * mat * mat * complex_mat
val qzvals : mat -> mat -> complex_mat
val hess : mat -> mat * mat
Eigenvalues & eigenvectors
val eig : ?permute:bool -> ?scale:bool -> mat -> complex_mat * complex_mat
val eigvals : ?permute:bool -> ?scale:bool -> mat -> complex_mat
Linear system of equations
val null : mat -> mat
val triangular_solve : upper:bool -> ?trans:bool -> mat -> mat -> mat
val linreg : mat -> mat -> elt * elt
Low-level factorisation functions
val lufact : mat -> mat * int32_mat
val qrfact : ?pivot:bool -> mat -> mat * mat * int32_mat
val bkfact : ?upper:bool -> ?symmetric:bool -> ?rook:bool -> mat -> mat * int32_mat
Matrix functions
val mpow : mat -> float -> mat
val expm : mat -> mat
val sinm : mat -> mat
val cosm : mat -> mat
val tanm : mat -> mat
val sincosm : mat -> mat * mat
val sinhm : mat -> mat
val coshm : mat -> mat
val tanhm : mat -> mat
val sinhcoshm : mat -> mat * mat
Helper functions
val select_ev : [ `LHP | `RHP | `UDI | `UDO ] -> mat -> int32_mat
val peakflops : ?n:int -> unit -> float
include Owl_linalg_intf.Real with type mat := mat and type elt := elt
include Owl_base_linalg_intf.Real with type mat := mat with type elt := elt
val care : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
val dare : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
val qr : mat -> mat * mat
val lq : mat -> mat * mat
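As the include lines above show, this module re-exports Owl's double-precision linear algebra for use as the primal layer, so it behaves like Owl.Linalg.D. A short sketch, assuming the `owl` package (the random system below is illustrative):

let () =
  let a = Owl.Mat.(add (eye 4) (uniform 4 4)) in
  let b = Owl.Mat.uniform 4 1 in
  let x = Owl.Linalg.D.linsolve a b in                     (* solve a * x = b *)
  Printf.printf "residual = %g\n" Owl.Mat.(l2norm' (sub (dot a x) b))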
\ No newline at end of file diff --git a/owl/Owl_algodiff_primal_ops/D/Mat/index.html b/owl/Owl_algodiff_primal_ops/D/Mat/index.html deleted file mode 100644 index 4caed2252..000000000 --- a/owl/Owl_algodiff_primal_ops/D/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_algodiff_primal_ops.D.Mat)

Module D.Mat

\ No newline at end of file diff --git a/owl/Owl_algodiff_primal_ops/D/index.html b/owl/Owl_algodiff_primal_ops/D/index.html deleted file mode 100644 index a5a72b74e..000000000 --- a/owl/Owl_algodiff_primal_ops/D/index.html +++ /dev/null @@ -1,579 +0,0 @@ - -D (owl.Owl_algodiff_primal_ops.D)

Module Owl_algodiff_primal_ops.D

include module type of struct include Owl_dense_ndarray.D end
include module type of struct include Owl_dense_ndarray_d end
type elt = float
type arr = (float, Stdlib.Bigarray.float64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
include Owl_dense_ndarray_intf.Common with type elt := elt and type arr := arr
include Owl_base_dense_ndarray_intf.Common with type elt := elt with type arr := arr
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:float -> int array -> arr
val shape : arr -> int array
val numel : arr -> int
val strides : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val slice_size : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val flatten : arr -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val squeeze : ?axis:int array -> arr -> arr
val expand : ?hi:bool -> arr -> int -> arr
val split : ?axis:int -> int array -> arr -> arr array
val draw : ?axis:int -> arr -> int -> arr * int array
val pad : ?v:elt -> int list list -> arr -> arr
val one_hot : int -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
Iterate array elements
val iteri : (int -> elt -> unit) -> arr -> unit
val iter : (elt -> unit) -> arr -> unit
val mapi : (int -> elt -> elt) -> arr -> arr
val map : (elt -> elt) -> arr -> arr
val filteri : (int -> elt -> bool) -> arr -> int array
val filter : (elt -> bool) -> arr -> int array
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
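The iteration helpers above follow the usual Owl conventions: map applies a function element-wise, and fold without an axis reduces over the flattened array into a one-element ndarray. A brief sketch, assuming the `owl` package and using the equivalent Owl.Dense.Ndarray.D module:

module N = Owl.Dense.Ndarray.D

let () =
  let x = N.sequential [| 2; 3 |] in              (* 0., 1., ..., 5. *)
  let y = N.map (fun e -> e *. e) x in            (* element-wise square *)
  let s = N.fold (fun acc e -> acc +. e) 0. y in  (* all-element reduction *)
  N.print s                                       (* a single-element ndarray *)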
Examination & Comparison
val exists : (elt -> bool) -> arr -> bool
val not_exists : (elt -> bool) -> arr -> bool
val for_all : (elt -> bool) -> arr -> bool
val is_zero : arr -> bool
val is_positive : arr -> bool
val is_negative : arr -> bool
val is_nonpositive : arr -> bool
val is_nonnegative : arr -> bool
val is_normal : arr -> bool
val not_nan : arr -> bool
val not_inf : arr -> bool
val equal : arr -> arr -> bool
val not_equal : arr -> arr -> bool
val greater : arr -> arr -> bool
val less : arr -> arr -> bool
val greater_equal : arr -> arr -> bool
val less_equal : arr -> arr -> bool
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val equal_scalar : arr -> elt -> bool
val not_equal_scalar : arr -> elt -> bool
val less_scalar : arr -> elt -> bool
val greater_scalar : arr -> elt -> bool
val less_equal_scalar : arr -> elt -> bool
val greater_equal_scalar : arr -> elt -> bool
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val diag : ?k:int -> arr -> arr
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
Create N-dimensional array
val linspace : elt -> elt -> int -> arr

linspace k 0. 9. 10 ...

val logspace : ?base:float -> elt -> elt -> int -> arr

logspace k 0. 9. 10 ...

val unit_basis : int -> int -> arr

unit_basis k n i returns a unit basis vector with ith element set to 1.

Obtain basic properties
val num_dims : arr -> int
val nth_dim : arr -> int -> int
val nnz : arr -> int
val density : arr -> float
val size_in_bytes : arr -> int
val same_shape : arr -> arr -> bool
val same_data : arr -> arr -> bool
val ind : arr -> int -> int array
val i1d : arr -> int array -> int
Manipulate a N-dimensional array
val get_index : arr -> int array array -> elt array
val set_index : arr -> int array array -> elt array -> unit
val get_fancy : Owl_types.index list -> arr -> arr
val set_fancy : Owl_types.index list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val sub_ndarray : int array -> arr -> arr array
val slice_left : arr -> int array -> arr
val fill : arr -> elt -> unit
val resize : ?head:bool -> arr -> int array -> arr
val flip : ?axis:int -> arr -> arr
val rotate : arr -> int -> arr
val swap : int -> int -> arr -> arr
val concat_vertical : arr -> arr -> arr
val concat_horizontal : arr -> arr -> arr
val concat_vh : arr array array -> arr
val split_vh : (int * int) array array -> arr -> arr array array
val dropout : ?rate:float -> arr -> arr
val top : arr -> int -> int array array
val bottom : arr -> int -> int array array
val sort : arr -> arr
val sort1 : ?axis:int -> arr -> arr
val argsort : arr -> (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
val mmap : Unix.file_descr -> ?pos:int64 -> bool -> int array -> arr
Iterate array elements
val iter2i : (int -> elt -> elt -> unit) -> arr -> arr -> unit
val iter2 : (elt -> elt -> unit) -> arr -> arr -> unit
val map2i : (int -> elt -> elt -> elt) -> arr -> arr -> arr
val map2 : (elt -> elt -> elt) -> arr -> arr -> arr
val iteri_nd : (int array -> elt -> unit) -> arr -> unit
val mapi_nd : (int array -> elt -> elt) -> arr -> arr
val foldi_nd : ?axis:int -> (int array -> elt -> elt -> elt) -> elt -> arr -> arr
val scani_nd : ?axis:int -> (int array -> elt -> elt -> elt) -> arr -> arr
val filteri_nd : (int array -> elt -> bool) -> arr -> int array array
val iter2i_nd : (int array -> elt -> elt -> unit) -> arr -> arr -> unit
val map2i_nd : (int array -> elt -> elt -> elt) -> arr -> arr -> arr
val iteri_slice : ?axis:int -> (int -> arr -> unit) -> arr -> unit
val iter_slice : ?axis:int -> (arr -> unit) -> arr -> unit
val mapi_slice : ?axis:int -> (int -> arr -> 'c) -> arr -> 'c array
val map_slice : ?axis:int -> (arr -> 'c) -> arr -> 'c array
val filteri_slice : ?axis:int -> (int -> arr -> bool) -> arr -> arr array
val filter_slice : ?axis:int -> (arr -> bool) -> arr -> arr array
val foldi_slice : ?axis:int -> (int -> 'c -> arr -> 'c) -> 'c -> arr -> 'c
val fold_slice : ?axis:int -> ('c -> arr -> 'c) -> 'c -> arr -> 'c
Examine array elements or compare two arrays
Input/Output functions
val to_array : arr -> elt array
val save : out:string -> arr -> unit
val load : string -> arr
val save_npy : out:string -> arr -> unit
val load_npy : string -> arr
Unary mathematical operations
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod' : arr -> elt
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean' : arr -> elt
val median' : arr -> elt
val median : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var' : arr -> elt
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std' : arr -> elt
val sem : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sem' : arr -> elt
val minmax : ?axis:int -> ?keep_dims:bool -> arr -> arr * arr
val minmax' : arr -> elt * elt
val min_i : arr -> elt * int array
val max_i : arr -> elt * int array
val minmax_i : arr -> (elt * int array) * (elt * int array)
val abs2 : arr -> arr
val conj : arr -> arr
val reci : arr -> arr
val reci_tol : ?tol:elt -> arr -> arr
val cbrt : arr -> arr
val exp2 : arr -> arr
val exp10 : arr -> arr
val expm1 : arr -> arr
val log1p : arr -> arr
val trunc : arr -> arr
val fix : arr -> arr
val modf : arr -> arr * arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm_sqr : ?axis:int -> ?keep_dims:bool -> arr -> arr
val vecnorm : ?axis:int -> ?p:float -> ?keep_dims:bool -> arr -> arr
val vecnorm' : ?p:float -> arr -> elt
val cumsum : ?axis:int -> arr -> arr
val cumprod : ?axis:int -> arr -> arr
val cummin : ?axis:int -> arr -> arr
val cummax : ?axis:int -> arr -> arr
val diff : ?axis:int -> ?n:int -> arr -> arr
val lgamma : arr -> arr
Binary mathematical operations
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val ssqr' : arr -> elt -> elt
val ssqr_diff' : arr -> arr -> elt
Tensor Calculus
val contract1 : (int * int) array -> arr -> arr
val contract2 : (int * int) array -> arr -> arr -> arr
Experimental functions
val slide : ?axis:int -> ?ofs:int -> ?step:int -> window:int -> arr -> arr
Functions of in-place modification
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:float -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val sort_ : arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_fancy_ : out:arr -> Owl_types.index list -> arr -> unit
val set_fancy_ : out:arr -> Owl_types.index list -> arr -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
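The trailing-underscore functions write into an existing array (the optional ~out, which by Owl's convention defaults to modifying the first array argument in place) instead of allocating a result, which is the usual choice in tight numerical loops. A hedged sketch, assuming the `owl` package and the equivalent Owl.Dense.Ndarray.D module (the update rule is illustrative):

module N = Owl.Dense.Ndarray.D

let () =
  let x = N.uniform [| 1000 |] in
  let g = N.gaussian [| 1000 |] in
  N.mul_scalar_ ~out:g g 0.01;   (* g <- 0.01 * g, reusing g's buffer *)
  N.sub_ ~out:x x g;             (* x <- x - g, reusing x's buffer *)
  Printf.printf "mean after update = %g\n" (N.mean' x)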

Matrix functions

val col : arr -> int -> arr
val cols : arr -> int array -> arr
val to_arrays : arr -> elt array array
val draw_rows : ?replacement:bool -> arr -> int -> arr * int array
val draw_cols : ?replacement:bool -> arr -> int -> arr * int array
val draw_rows2 : ?replacement:bool -> arr -> arr -> int -> arr * arr * int array
val draw_cols2 : ?replacement:bool -> arr -> arr -> int -> arr * arr * int array
include Owl_dense_ndarray_intf.Real with type elt := elt and type arr := arr
include Owl_base_dense_ndarray_intf.Real with type elt := elt with type arr := arr
val sum_slices : ?axis:int -> arr -> arr
val signum : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val atan2 : arr -> arr -> arr
val approx_equal : ?eps:float -> arr -> arr -> bool
val approx_equal_scalar : ?eps:float -> arr -> float -> bool
val approx_elt_equal : ?eps:float -> arr -> arr -> arr
val approx_elt_equal_scalar : ?eps:float -> arr -> float -> arr
val dot : arr -> arr -> arr
val trace : arr -> elt
Helper functions
val float_to_elt : float -> elt
val elt_to_float : elt -> float
Real operations
val i0 : arr -> arr
val i0e : arr -> arr
val i1 : arr -> arr
val i1e : arr -> arr
val iv : v:arr -> arr -> arr
val scalar_iv : v:elt -> arr -> arr
val iv_scalar : v:arr -> elt -> arr
val j0 : arr -> arr
val j1 : arr -> arr
val jv : v:arr -> arr -> arr
val scalar_jv : v:elt -> arr -> arr
val jv_scalar : v:arr -> elt -> arr
val erf : arr -> arr
val erfc : arr -> arr
val logistic : arr -> arr
val elu : ?alpha:elt -> arr -> arr
val leaky_relu : ?alpha:elt -> arr -> arr
val softplus : arr -> arr
val softsign : arr -> arr
val softmax : ?axis:int -> arr -> arr
val sigmoid : arr -> arr
val log_sum_exp' : arr -> float
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val fmod_scalar : arr -> elt -> arr
val scalar_fmod : elt -> arr -> arr
val cross_entropy' : arr -> arr -> float
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
val poisson : mu:elt -> int array -> arr
val poisson_ : mu:elt -> out:arr -> unit
include Owl_dense_ndarray_intf.NN with type arr := arr
include Owl_base_dense_ndarray_intf.NN with type arr := arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val max_pool2d_argmax : ?padding:Owl_types.padding -> arr -> int array -> int array -> arr * (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
val conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
include Owl_dense_ndarray_intf.Distribution with type arr := arr
Stats & distribution functions
val uniform_rvs : a:arr -> b:arr -> n:int -> arr
val uniform_pdf : a:arr -> b:arr -> arr -> arr
val uniform_logpdf : a:arr -> b:arr -> arr -> arr
val uniform_cdf : a:arr -> b:arr -> arr -> arr
val uniform_logcdf : a:arr -> b:arr -> arr -> arr
val uniform_ppf : a:arr -> b:arr -> arr -> arr
val uniform_sf : a:arr -> b:arr -> arr -> arr
val uniform_logsf : a:arr -> b:arr -> arr -> arr
val uniform_isf : a:arr -> b:arr -> arr -> arr
val gaussian_rvs : mu:arr -> sigma:arr -> n:int -> arr
val gaussian_pdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logpdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_cdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logcdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_ppf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_sf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logsf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_isf : mu:arr -> sigma:arr -> arr -> arr
val exponential_rvs : lambda:arr -> n:int -> arr
val exponential_pdf : lambda:arr -> arr -> arr
val exponential_logpdf : lambda:arr -> arr -> arr
val exponential_cdf : lambda:arr -> arr -> arr
val exponential_logcdf : lambda:arr -> arr -> arr
val exponential_ppf : lambda:arr -> arr -> arr
val exponential_sf : lambda:arr -> arr -> arr
val exponential_logsf : lambda:arr -> arr -> arr
val exponential_isf : lambda:arr -> arr -> arr
val gamma_rvs : shape:arr -> scale:arr -> n:int -> arr
val gamma_pdf : shape:arr -> scale:arr -> arr -> arr
val gamma_logpdf : shape:arr -> scale:arr -> arr -> arr
val gamma_cdf : shape:arr -> scale:arr -> arr -> arr
val gamma_logcdf : shape:arr -> scale:arr -> arr -> arr
val gamma_ppf : shape:arr -> scale:arr -> arr -> arr
val gamma_sf : shape:arr -> scale:arr -> arr -> arr
val gamma_logsf : shape:arr -> scale:arr -> arr -> arr
val gamma_isf : shape:arr -> scale:arr -> arr -> arr
val beta_rvs : a:arr -> b:arr -> n:int -> arr
val beta_pdf : a:arr -> b:arr -> arr -> arr
val beta_logpdf : a:arr -> b:arr -> arr -> arr
val beta_cdf : a:arr -> b:arr -> arr -> arr
val beta_logcdf : a:arr -> b:arr -> arr -> arr
val beta_ppf : a:arr -> b:arr -> arr -> arr
val beta_sf : a:arr -> b:arr -> arr -> arr
val beta_logsf : a:arr -> b:arr -> arr -> arr
val beta_isf : a:arr -> b:arr -> arr -> arr
val chi2_rvs : df:arr -> n:int -> arr
val chi2_pdf : df:arr -> arr -> arr
val chi2_logpdf : df:arr -> arr -> arr
val chi2_cdf : df:arr -> arr -> arr
val chi2_logcdf : df:arr -> arr -> arr
val chi2_ppf : df:arr -> arr -> arr
val chi2_sf : df:arr -> arr -> arr
val chi2_logsf : df:arr -> arr -> arr
val chi2_isf : df:arr -> arr -> arr
val f_rvs : dfnum:arr -> dfden:arr -> n:int -> arr
val f_pdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logpdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_cdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logcdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_ppf : dfnum:arr -> dfden:arr -> arr -> arr
val f_sf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logsf : dfnum:arr -> dfden:arr -> arr -> arr
val f_isf : dfnum:arr -> dfden:arr -> arr -> arr
val cauchy_rvs : loc:arr -> scale:arr -> n:int -> arr
val cauchy_pdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logpdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_cdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logcdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_ppf : loc:arr -> scale:arr -> arr -> arr
val cauchy_sf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logsf : loc:arr -> scale:arr -> arr -> arr
val cauchy_isf : loc:arr -> scale:arr -> arr -> arr
val lomax_rvs : shape:arr -> scale:arr -> n:int -> arr
val lomax_pdf : shape:arr -> scale:arr -> arr -> arr
val lomax_logpdf : shape:arr -> scale:arr -> arr -> arr
val lomax_cdf : shape:arr -> scale:arr -> arr -> arr
val lomax_logcdf : shape:arr -> scale:arr -> arr -> arr
val lomax_ppf : shape:arr -> scale:arr -> arr -> arr
val lomax_sf : shape:arr -> scale:arr -> arr -> arr
val lomax_logsf : shape:arr -> scale:arr -> arr -> arr
val lomax_isf : shape:arr -> scale:arr -> arr -> arr
val weibull_rvs : shape:arr -> scale:arr -> n:int -> arr
val weibull_pdf : shape:arr -> scale:arr -> arr -> arr
val weibull_logpdf : shape:arr -> scale:arr -> arr -> arr
val weibull_cdf : shape:arr -> scale:arr -> arr -> arr
val weibull_logcdf : shape:arr -> scale:arr -> arr -> arr
val weibull_ppf : shape:arr -> scale:arr -> arr -> arr
val weibull_sf : shape:arr -> scale:arr -> arr -> arr
val weibull_logsf : shape:arr -> scale:arr -> arr -> arr
val weibull_isf : shape:arr -> scale:arr -> arr -> arr
val laplace_rvs : loc:arr -> scale:arr -> n:int -> arr
val laplace_pdf : loc:arr -> scale:arr -> arr -> arr
val laplace_logpdf : loc:arr -> scale:arr -> arr -> arr
val laplace_cdf : loc:arr -> scale:arr -> arr -> arr
val laplace_logcdf : loc:arr -> scale:arr -> arr -> arr
val laplace_ppf : loc:arr -> scale:arr -> arr -> arr
val laplace_sf : loc:arr -> scale:arr -> arr -> arr
val laplace_logsf : loc:arr -> scale:arr -> arr -> arr
val laplace_isf : loc:arr -> scale:arr -> arr -> arr
val gumbel1_rvs : a:arr -> b:arr -> n:int -> arr
val gumbel1_pdf : a:arr -> b:arr -> arr -> arr
val gumbel1_logpdf : a:arr -> b:arr -> arr -> arr
val gumbel1_cdf : a:arr -> b:arr -> arr -> arr
val gumbel1_logcdf : a:arr -> b:arr -> arr -> arr
val gumbel1_ppf : a:arr -> b:arr -> arr -> arr
val gumbel1_sf : a:arr -> b:arr -> arr -> arr
val gumbel1_logsf : a:arr -> b:arr -> arr -> arr
val gumbel1_isf : a:arr -> b:arr -> arr -> arr
val gumbel2_rvs : a:arr -> b:arr -> n:int -> arr
val gumbel2_pdf : a:arr -> b:arr -> arr -> arr
val gumbel2_logpdf : a:arr -> b:arr -> arr -> arr
val gumbel2_cdf : a:arr -> b:arr -> arr -> arr
val gumbel2_logcdf : a:arr -> b:arr -> arr -> arr
val gumbel2_ppf : a:arr -> b:arr -> arr -> arr
val gumbel2_sf : a:arr -> b:arr -> arr -> arr
val gumbel2_logsf : a:arr -> b:arr -> arr -> arr
val gumbel2_isf : a:arr -> b:arr -> arr -> arr
val logistic_rvs : loc:arr -> scale:arr -> n:int -> arr
val logistic_pdf : loc:arr -> scale:arr -> arr -> arr
val logistic_logpdf : loc:arr -> scale:arr -> arr -> arr
val logistic_cdf : loc:arr -> scale:arr -> arr -> arr
val logistic_logcdf : loc:arr -> scale:arr -> arr -> arr
val logistic_ppf : loc:arr -> scale:arr -> arr -> arr
val logistic_sf : loc:arr -> scale:arr -> arr -> arr
val logistic_logsf : loc:arr -> scale:arr -> arr -> arr
val logistic_isf : loc:arr -> scale:arr -> arr -> arr
val lognormal_rvs : mu:arr -> sigma:arr -> n:int -> arr
val lognormal_pdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logpdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_cdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logcdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_ppf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_sf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logsf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_isf : mu:arr -> sigma:arr -> arr -> arr
val rayleigh_rvs : sigma:arr -> n:int -> arr
val rayleigh_pdf : sigma:arr -> arr -> arr
val rayleigh_logpdf : sigma:arr -> arr -> arr
val rayleigh_cdf : sigma:arr -> arr -> arr
val rayleigh_logcdf : sigma:arr -> arr -> arr
val rayleigh_ppf : sigma:arr -> arr -> arr
val rayleigh_sf : sigma:arr -> arr -> arr
val rayleigh_logsf : sigma:arr -> arr -> arr
val rayleigh_isf : sigma:arr -> arr -> arr
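As a quick orientation, a minimal usage sketch of the vectorised distribution functions listed above; it assumes the enclosing module is the double-precision instance Owl_algodiff_primal_ops.D (an assumption, not stated on this page — substitute the module actually in scope):

module M = Owl_algodiff_primal_ops.D
let mu     = M.create [|3|] 0.                     (* per-element means *)
let sigma  = M.create [|3|] 1.                     (* per-element standard deviations *)
let sample = M.gaussian_rvs ~mu ~sigma ~n:3        (* draw samples from N(mu, sigma) *)
let logp   = M.gaussian_logpdf ~mu ~sigma sample   (* evaluate the log density at the samples *)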
include module type of struct include Owl_dense_ndarray.Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (-$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (*$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (/$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($+) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($-) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($*) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($/) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (!=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<>) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (>) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (>=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
include sig ... end
val (=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (!=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<>$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (>$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (>=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (!=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<>.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (>.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (>=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=~) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (=~$) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (=~.) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=~.$) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (%) : (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val (%$) : (float, 'a) Owl_dense_ndarray_generic.t -> float -> (float, 'a) Owl_dense_ndarray_generic.t
val (**) : (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val ($**) : float -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val (**$) : (float, 'a) Owl_dense_ndarray_generic.t -> float -> (float, 'a) Owl_dense_ndarray_generic.t
val (+=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (-=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (*=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (/=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (+$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (-$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (*$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (/$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (.!{;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.!{;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (.${}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.${;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list array -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.${}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (.${;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list array -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
include sig ... end
val (.%{}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int -> 'a
val (.%{;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int array -> 'a -> unit
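A minimal sketch of the included operators in action, assuming Owl_dense_ndarray.Operator is opened and the double-precision ndarray module is used:

open Owl_dense_ndarray.Operator
let x = Owl_dense_ndarray.D.sequential [|2; 3|]
let y = x +$ 10.          (* elementwise add a scalar, returns a new array *)
let () = y *$= 0.5        (* in-place scalar multiplication *)
let e = y.%{1; 2}         (* read the element at index (1, 2) *)
let r = y.${[0]; []}      (* slice: row 0, all columns *)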
val mpow : Owl_linalg_d.mat -> float -> Owl_linalg_d.mat
module Scalar = Owl_maths
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file
diff --git a/owl/Owl_algodiff_primal_ops/S/Linalg/index.html b/owl/Owl_algodiff_primal_ops/S/Linalg/index.html
deleted file mode 100644
index 0de92b6ad..000000000
--- a/owl/Owl_algodiff_primal_ops/S/Linalg/index.html
+++ /dev/null
@@ -1,25 +0,0 @@
Linalg (owl.Owl_algodiff_primal_ops.S.Linalg)

Module S.Linalg

include module type of struct include Owl_linalg.S end
include module type of struct include Owl_linalg_s end
type elt = float
type complex_mat = Owl_dense_matrix_c.mat
type int32_mat = (int32, Stdlib.Bigarray.int32_elt) Owl_dense_matrix_generic.t
include Owl_linalg_intf.Common with type elt := elt and type mat := mat and type complex_mat := complex_mat and type int32_mat := int32_mat
include Owl_base_linalg_intf.Common with type elt := elt with type mat := mat with type complex_mat := complex_mat with type int32_mat := int32_mat
Basic functions
val inv : mat -> mat
val det : mat -> elt
val logdet : mat -> elt
val is_triu : mat -> bool
val is_tril : mat -> bool
val is_symmetric : mat -> bool
val is_diag : mat -> bool
Factorisation
val svd : ?thin:bool -> mat -> mat * mat * mat
val chol : ?upper:bool -> mat -> mat
Linear system of equations
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> mat -> mat -> mat
val sylvester : mat -> mat -> mat -> mat
val lyapunov : mat -> mat -> mat
val discrete_lyapunov : ?solver:[ `default | `direct | `bilinear ] -> mat -> mat -> mat
Basic functions
val pinv : ?tol:float -> mat -> mat
val rank : ?tol:float -> mat -> int
val norm : ?p:float -> mat -> float
val vecnorm : ?p:float -> mat -> float
val cond : ?p:float -> mat -> float
val rcond : mat -> float
val is_posdef : mat -> bool
Factorisation
val lu : mat -> mat * mat * int32_mat
val svdvals : mat -> mat
val gsvd : mat -> mat -> mat * mat * mat * mat * mat * mat
val gsvdvals : mat -> mat -> mat
val schur : mat -> mat * mat * complex_mat
val schur_tz : mat -> mat * mat
val ordschur : select:int32_mat -> mat -> mat -> mat * mat * complex_mat
val qz : mat -> mat -> mat * mat * mat * mat * complex_mat
val ordqz : select:int32_mat -> mat -> mat -> mat -> mat -> mat * mat * mat * mat * complex_mat
val qzvals : mat -> mat -> complex_mat
val hess : mat -> mat * mat
Eigenvalues & eigenvectors
val eig : ?permute:bool -> ?scale:bool -> mat -> complex_mat * complex_mat
val eigvals : ?permute:bool -> ?scale:bool -> mat -> complex_mat
Linear system of equations
val null : mat -> mat
val triangular_solve : upper:bool -> ?trans:bool -> mat -> mat -> mat
val linreg : mat -> mat -> elt * elt
Low-level factorisation functions
val lufact : mat -> mat * int32_mat
val qrfact : ?pivot:bool -> mat -> mat * mat * int32_mat
val bkfact : ?upper:bool -> ?symmetric:bool -> ?rook:bool -> mat -> mat * int32_mat
Matrix functions
val mpow : mat -> float -> mat
val expm : mat -> mat
val sinm : mat -> mat
val cosm : mat -> mat
val tanm : mat -> mat
val sincosm : mat -> mat * mat
val sinhm : mat -> mat
val coshm : mat -> mat
val tanhm : mat -> mat
val sinhcoshm : mat -> mat * mat
Helper functions
val select_ev : [ `LHP | `RHP | `UDI | `UDO ] -> mat -> int32_mat
val peakflops : ?n:int -> unit -> float
include Owl_linalg_intf.Real with type mat := mat and type elt := elt
include Owl_base_linalg_intf.Real with type mat := mat with type elt := elt
val care : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
val dare : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
val qr : mat -> mat * mat
val lq : mat -> mat * mat
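A brief sketch of the factorisation and solver entry points, assuming Owl_linalg.S and Owl_dense_matrix.S, which this Linalg module mirrors via the includes above:

let a = Owl_dense_matrix.S.uniform 4 4
let b = Owl_dense_matrix.S.uniform 4 1
let x = Owl_linalg.S.linsolve a b        (* solve the linear system a * x = b *)
let u, s, vt = Owl_linalg.S.svd a        (* thin singular value decomposition by default *)
let q, r = Owl_linalg.S.qr a             (* QR factorisation *)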
\ No newline at end of file
diff --git a/owl/Owl_algodiff_primal_ops/S/Mat/index.html b/owl/Owl_algodiff_primal_ops/S/Mat/index.html
deleted file mode 100644
index eed6376dc..000000000
--- a/owl/Owl_algodiff_primal_ops/S/Mat/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Mat (owl.Owl_algodiff_primal_ops.S.Mat)

Module S.Mat

\ No newline at end of file
diff --git a/owl/Owl_algodiff_primal_ops/S/index.html b/owl/Owl_algodiff_primal_ops/S/index.html
deleted file mode 100644
index 7109a2d7c..000000000
--- a/owl/Owl_algodiff_primal_ops/S/index.html
+++ /dev/null
@@ -1,579 +0,0 @@
S (owl.Owl_algodiff_primal_ops.S)

Module Owl_algodiff_primal_ops.S

include module type of struct include Owl_dense_ndarray.S end
include module type of struct include Owl_dense_ndarray_s end
type elt = float
type arr = (float, Stdlib.Bigarray.float32_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
include Owl_dense_ndarray_intf.Common with type elt := elt and type arr := arr
include Owl_base_dense_ndarray_intf.Common with type elt := elt with type arr := arr
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:float -> int array -> arr
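For example, a minimal sketch of the creation functions above, using the single-precision Owl_dense_ndarray.S that this module includes:

let x = Owl_dense_ndarray.S.zeros [|2; 3|]
let y = Owl_dense_ndarray.S.uniform ~a:0. ~b:1. [|2; 3|]
let z = Owl_dense_ndarray.S.init [|4|] (fun i -> float_of_int i)   (* 0., 1., 2., 3. *)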
val shape : arr -> int array
val numel : arr -> int
val strides : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val slice_size : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val flatten : arr -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val squeeze : ?axis:int array -> arr -> arr
val expand : ?hi:bool -> arr -> int -> arr
val split : ?axis:int -> int array -> arr -> arr array
val draw : ?axis:int -> arr -> int -> arr * int array
val pad : ?v:elt -> int list list -> arr -> arr
val one_hot : int -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
Iterate array elements
val iteri : (int -> elt -> unit) -> arr -> unit
val iter : (elt -> unit) -> arr -> unit
val mapi : (int -> elt -> elt) -> arr -> arr
val map : (elt -> elt) -> arr -> arr
val filteri : (int -> elt -> bool) -> arr -> int array
val filter : (elt -> bool) -> arr -> int array
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
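A small sketch of the iteration functions above; the index passed to iteri/mapi/filteri is the flat element offset (the *_nd variants further below take full index arrays):

let x = Owl_dense_ndarray.S.sequential [|2; 2|]
let doubled = Owl_dense_ndarray.S.map (fun e -> 2. *. e) x
let hits = Owl_dense_ndarray.S.filter (fun e -> e > 1.5) x          (* flat indices of matching elements *)
let total = Owl_dense_ndarray.S.fold (fun acc e -> acc +. e) 0. x   (* reduced ndarray; pass ~axis to reduce one dimension *)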
Examination & Comparison
val exists : (elt -> bool) -> arr -> bool
val not_exists : (elt -> bool) -> arr -> bool
val for_all : (elt -> bool) -> arr -> bool
val is_zero : arr -> bool
val is_positive : arr -> bool
val is_negative : arr -> bool
val is_nonpositive : arr -> bool
val is_nonnegative : arr -> bool
val is_normal : arr -> bool
val not_nan : arr -> bool
val not_inf : arr -> bool
val equal : arr -> arr -> bool
val not_equal : arr -> arr -> bool
val greater : arr -> arr -> bool
val less : arr -> arr -> bool
val greater_equal : arr -> arr -> bool
val less_equal : arr -> arr -> bool
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val equal_scalar : arr -> elt -> bool
val not_equal_scalar : arr -> elt -> bool
val less_scalar : arr -> elt -> bool
val greater_scalar : arr -> elt -> bool
val less_equal_scalar : arr -> elt -> bool
val greater_equal_scalar : arr -> elt -> bool
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val diag : ?k:int -> arr -> arr
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
Create N-dimensional array
val linspace : elt -> elt -> int -> arr

linspace 0. 9. 10 creates an array of 10 evenly spaced values from 0. to 9.

val logspace : ?base:float -> elt -> elt -> int -> arr

logspace 0. 9. 10 creates an array of 10 logarithmically spaced values whose exponents run from 0. to 9.; the optional ~base sets the logarithm base.

val unit_basis : int -> int -> arr

unit_basis n i returns a unit basis vector of length n with the i-th element set to 1.

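For instance, a minimal sketch (the values in the comments are the expected contents):

let xs = Owl_dense_ndarray.S.linspace 0. 9. 10            (* 0., 1., ..., 9. *)
let ys = Owl_dense_ndarray.S.logspace ~base:10. 0. 3. 4   (* 1., 10., 100., 1000. *)
let e2 = Owl_dense_ndarray.S.unit_basis 5 2               (* [|0.; 0.; 1.; 0.; 0.|] *)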
Obtain basic properties
val num_dims : arr -> int
val nth_dim : arr -> int -> int
val nnz : arr -> int
val density : arr -> float
val size_in_bytes : arr -> int
val same_shape : arr -> arr -> bool
val same_data : arr -> arr -> bool
val ind : arr -> int -> int array
val i1d : arr -> int array -> int
Manipulate an N-dimensional array
val get_index : arr -> int array array -> elt array
val set_index : arr -> int array array -> elt array -> unit
val get_fancy : Owl_types.index list -> arr -> arr
val set_fancy : Owl_types.index list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val sub_ndarray : int array -> arr -> arr array
val slice_left : arr -> int array -> arr
val fill : arr -> elt -> unit
val resize : ?head:bool -> arr -> int array -> arr
val flip : ?axis:int -> arr -> arr
val rotate : arr -> int -> arr
val swap : int -> int -> arr -> arr
val concat_vertical : arr -> arr -> arr
val concat_horizontal : arr -> arr -> arr
val concat_vh : arr array array -> arr
val split_vh : (int * int) array array -> arr -> arr array array
val dropout : ?rate:float -> arr -> arr
val top : arr -> int -> int array array
val bottom : arr -> int -> int array array
val sort : arr -> arr
val sort1 : ?axis:int -> arr -> arr
val argsort : arr -> (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
val mmap : Unix.file_descr -> ?pos:int64 -> bool -> int array -> arr
Iterate array elements
val iter2i : (int -> elt -> elt -> unit) -> arr -> arr -> unit
val iter2 : (elt -> elt -> unit) -> arr -> arr -> unit
val map2i : (int -> elt -> elt -> elt) -> arr -> arr -> arr
val map2 : (elt -> elt -> elt) -> arr -> arr -> arr
val iteri_nd : (int array -> elt -> unit) -> arr -> unit
val mapi_nd : (int array -> elt -> elt) -> arr -> arr
val foldi_nd : ?axis:int -> (int array -> elt -> elt -> elt) -> elt -> arr -> arr
val scani_nd : ?axis:int -> (int array -> elt -> elt -> elt) -> arr -> arr
val filteri_nd : (int array -> elt -> bool) -> arr -> int array array
val iter2i_nd : (int array -> elt -> elt -> unit) -> arr -> arr -> unit
val map2i_nd : (int array -> elt -> elt -> elt) -> arr -> arr -> arr
val iteri_slice : ?axis:int -> (int -> arr -> unit) -> arr -> unit
val iter_slice : ?axis:int -> (arr -> unit) -> arr -> unit
val mapi_slice : ?axis:int -> (int -> arr -> 'c) -> arr -> 'c array
val map_slice : ?axis:int -> (arr -> 'c) -> arr -> 'c array
val filteri_slice : ?axis:int -> (int -> arr -> bool) -> arr -> arr array
val filter_slice : ?axis:int -> (arr -> bool) -> arr -> arr array
val foldi_slice : ?axis:int -> (int -> 'c -> arr -> 'c) -> 'c -> arr -> 'c
val fold_slice : ?axis:int -> ('c -> arr -> 'c) -> 'c -> arr -> 'c
Examine array elements or compare two arrays
Input/Output functions
val to_array : arr -> elt array
val save : out:string -> arr -> unit
val load : string -> arr
val save_npy : out:string -> arr -> unit
val load_npy : string -> arr
Unary mathematical operations
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod' : arr -> elt
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean' : arr -> elt
val median' : arr -> elt
val median : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var' : arr -> elt
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std' : arr -> elt
val sem : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sem' : arr -> elt
val minmax : ?axis:int -> ?keep_dims:bool -> arr -> arr * arr
val minmax' : arr -> elt * elt
val min_i : arr -> elt * int array
val max_i : arr -> elt * int array
val minmax_i : arr -> (elt * int array) * (elt * int array)
val abs2 : arr -> arr
val conj : arr -> arr
val reci : arr -> arr
val reci_tol : ?tol:elt -> arr -> arr
val cbrt : arr -> arr
val exp2 : arr -> arr
val exp10 : arr -> arr
val expm1 : arr -> arr
val log1p : arr -> arr
val trunc : arr -> arr
val fix : arr -> arr
val modf : arr -> arr * arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm_sqr : ?axis:int -> ?keep_dims:bool -> arr -> arr
val vecnorm : ?axis:int -> ?p:float -> ?keep_dims:bool -> arr -> arr
val vecnorm' : ?p:float -> arr -> elt
val cumsum : ?axis:int -> arr -> arr
val cumprod : ?axis:int -> arr -> arr
val cummin : ?axis:int -> arr -> arr
val cummax : ?axis:int -> arr -> arr
val diff : ?axis:int -> ?n:int -> arr -> arr
val lgamma : arr -> arr
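A sketch of the reduction-style statistics above: primed variants collapse everything to a scalar, the unprimed ones accept an optional ~axis:

let x = Owl_dense_ndarray.S.sequential [|2; 3|]
let m = Owl_dense_ndarray.S.mean' x               (* scalar mean over all elements *)
let s = Owl_dense_ndarray.S.std ~axis:0 x         (* standard deviation along axis 0 *)
let c = Owl_dense_ndarray.S.cumsum ~axis:1 x      (* running sums along the last axis *)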
Binary mathematical operations
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val ssqr' : arr -> elt -> elt
val ssqr_diff' : arr -> arr -> elt
Tensor Calculus
val contract1 : (int * int) array -> arr -> arr
val contract2 : (int * int) array -> arr -> arr -> arr
Experimental functions
val slide : ?axis:int -> ?ofs:int -> ?step:int -> window:int -> arr -> arr
Functions of in-place modification
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:float -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val sort_ : arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_fancy_ : out:arr -> Owl_types.index list -> arr -> unit
val set_fancy_ : out:arr -> Owl_types.index list -> arr -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
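A sketch of the in-place convention used throughout this block: an underscore suffix plus an optional ~out destination; when ~out is omitted the result is typically written back into the first array argument (an assumption to check against the generic documentation):

let x = Owl_dense_ndarray.S.ones [|3|]
let y = Owl_dense_ndarray.S.zeros [|3|]
let () = Owl_dense_ndarray.S.add_ ~out:y x y     (* y <- x + y, no fresh allocation *)
let () = Owl_dense_ndarray.S.sqrt_ x             (* x <- sqrt x *)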

Matrix functions

val col : arr -> int -> arr
val cols : arr -> int array -> arr
val to_arrays : arr -> elt array array
val draw_rows : ?replacement:bool -> arr -> int -> arr * int array
val draw_cols : ?replacement:bool -> arr -> int -> arr * int array
val draw_rows2 : ?replacement:bool -> arr -> arr -> int -> arr * arr * int array
val draw_cols2 : ?replacement:bool -> arr -> arr -> int -> arr * arr * int array
include Owl_dense_ndarray_intf.Real with type elt := elt and type arr := arr
include Owl_base_dense_ndarray_intf.Real with type elt := elt with type arr := arr
val sum_slices : ?axis:int -> arr -> arr
val signum : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val atan2 : arr -> arr -> arr
val approx_equal : ?eps:float -> arr -> arr -> bool
val approx_equal_scalar : ?eps:float -> arr -> float -> bool
val approx_elt_equal : ?eps:float -> arr -> arr -> arr
val approx_elt_equal_scalar : ?eps:float -> arr -> float -> arr
val dot : arr -> arr -> arr
val trace : arr -> elt
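For example, a sketch of dot and trace on 2-d single-precision arrays (dot performs matrix multiplication on 2-d inputs):

let a = Owl_dense_ndarray.S.uniform [|3; 4|]
let b = Owl_dense_ndarray.S.uniform [|4; 2|]
let c = Owl_dense_ndarray.S.dot a b                               (* shape [|3; 2|] *)
let t = Owl_dense_ndarray.S.trace (Owl_dense_ndarray.S.uniform [|3; 3|])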
Helper functions
val float_to_elt : float -> elt
val elt_to_float : elt -> float
Real operations
val i0 : arr -> arr
val i0e : arr -> arr
val i1 : arr -> arr
val i1e : arr -> arr
val iv : v:arr -> arr -> arr
val scalar_iv : v:elt -> arr -> arr
val iv_scalar : v:arr -> elt -> arr
val j0 : arr -> arr
val j1 : arr -> arr
val jv : v:arr -> arr -> arr
val scalar_jv : v:elt -> arr -> arr
val jv_scalar : v:arr -> elt -> arr
val erf : arr -> arr
val erfc : arr -> arr
val logistic : arr -> arr
val elu : ?alpha:elt -> arr -> arr
val leaky_relu : ?alpha:elt -> arr -> arr
val softplus : arr -> arr
val softsign : arr -> arr
val softmax : ?axis:int -> arr -> arr
val sigmoid : arr -> arr
val log_sum_exp' : arr -> float
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val fmod_scalar : arr -> elt -> arr
val scalar_fmod : elt -> arr -> arr
val cross_entropy' : arr -> arr -> float
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
val poisson : mu:elt -> int array -> arr
val poisson_ : mu:elt -> out:arr -> unit
include Owl_dense_ndarray_intf.NN with type arr := arr
include Owl_base_dense_ndarray_intf.NN with type arr := arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val max_pool2d_argmax : ?padding:Owl_types.padding -> arr -> int array -> int array -> arr * (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
val conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
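A minimal sketch of the convolution and pooling entry points above; the NHWC layout and the concrete shapes here are illustrative assumptions only:

let input  = Owl_dense_ndarray.S.uniform [|1; 28; 28; 3|]   (* batch; rows; cols; channels *)
let kernel = Owl_dense_ndarray.S.uniform [|3; 3; 3; 8|]     (* kh; kw; in_channels; out_channels *)
let conv   = Owl_dense_ndarray.S.conv2d ~padding:Owl_types.SAME input kernel [|1; 1|]
let pooled = Owl_dense_ndarray.S.max_pool2d ~padding:Owl_types.VALID conv [|2; 2|] [|2; 2|]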
include module type of struct include Owl_dense_ndarray.Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (-$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (*$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (/$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($+) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($-) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($*) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($/) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (!=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<>) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (>) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (>=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
include sig ... end
val (=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (!=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<>$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (>$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (>=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (!=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<>.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (>.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (>=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=~) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (=~$) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (=~.) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=~.$) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (%) : (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val (%$) : (float, 'a) Owl_dense_ndarray_generic.t -> float -> (float, 'a) Owl_dense_ndarray_generic.t
val (**) : (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val ($**) : float -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val (**$) : (float, 'a) Owl_dense_ndarray_generic.t -> float -> (float, 'a) Owl_dense_ndarray_generic.t
val (+=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (-=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (*=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (/=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (+$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (-$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (*$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (/$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (.!{;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.!{;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (.${}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.${;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list array -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.${}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (.${;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list array -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
include sig ... end
val (.%{}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int -> 'a
val (.%{;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int array -> 'a -> unit
val mpow : Owl_linalg_s.mat -> float -> Owl_linalg_s.mat
module Scalar = Owl_maths
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file
diff --git a/owl/Owl_cblas/.dummy b/owl/Owl_cblas/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_cblas_basic/.dummy b/owl/Owl_cblas_basic/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_cblas_generated/.dummy b/owl/Owl_cblas_generated/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_cluster/.dummy b/owl/Owl_cluster/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_core_types/.dummy b/owl/Owl_core_types/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_dataset/.dummy b/owl/Owl_dataset/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_dense/.dummy b/owl/Owl_dense/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_dense_matrix/.dummy b/owl/Owl_dense_matrix/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_dense_matrix/C/index.html b/owl/Owl_dense_matrix/C/index.html
deleted file mode 100644
index 921b5ca75..000000000
--- a/owl/Owl_dense_matrix/C/index.html
+++ /dev/null
@@ -1,215 +0,0 @@
C (owl.Owl_dense_matrix.C)

Module Owl_dense_matrix.C

include module type of struct include Owl_dense_matrix_c end
type elt = Stdlib.Complex.t
type mat = (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) Owl_dense_matrix_generic.t
type cast_mat = (float, Stdlib.Bigarray.float32_elt) Owl_dense_matrix_generic.t
include Owl_dense_matrix_intf.Common with type elt := elt and type mat := mat
Create dense matrices
val empty : int -> int -> mat
val create : int -> int -> elt -> mat
val init : int -> int -> (int -> elt) -> mat
val init_2d : int -> int -> (int -> int -> elt) -> mat
val zeros : int -> int -> mat
val ones : int -> int -> mat
val eye : int -> mat
val sequential : ?a:elt -> ?step:elt -> int -> int -> mat
val uniform : ?a:elt -> ?b:elt -> int -> int -> mat
val gaussian : ?mu:elt -> ?sigma:elt -> int -> int -> mat
val bernoulli : ?p:float -> int -> int -> mat
val unit_basis : int -> int -> mat
val diagm : ?k:int -> mat -> mat
val triu : ?k:int -> mat -> mat
val tril : ?k:int -> mat -> mat
val symmetric : ?upper:bool -> mat -> mat
val bidiagonal : ?upper:bool -> mat -> mat -> mat
val toeplitz : ?c:mat -> mat -> mat
val hankel : ?r:mat -> mat -> mat
val hadamard : int -> mat
val magic : int -> mat
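For instance, a minimal sketch of creating single-precision complex matrices with this module:

let id = Owl_dense_matrix.C.eye 3                                (* 3 x 3 complex identity *)
let z  = Owl_dense_matrix.C.create 2 2 Complex.{ re = 1.; im = 0. }
let u  = Owl_dense_matrix.C.uniform 2 3                          (* random complex entries *)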
Dense row vectors and meshgrids
val vector : int -> mat
val vector_zeros : int -> mat
val vector_ones : int -> mat
val vector_uniform : int -> mat
val linspace : elt -> elt -> int -> mat
val logspace : ?base:float -> elt -> elt -> int -> mat
val meshgrid : elt -> elt -> elt -> elt -> int -> int -> mat * mat
val meshup : mat -> mat -> mat * mat
Obtain the basic properties of a matrix
val shape : mat -> int * int
val row_num : mat -> int
val col_num : mat -> int
val numel : mat -> int
val nnz : mat -> int
val density : mat -> float
val size_in_bytes : mat -> int
val same_shape : mat -> mat -> bool
val same_data : mat -> mat -> bool
Manipulate a matrix
val get : mat -> int -> int -> elt
val set : mat -> int -> int -> elt -> unit
val get_index : mat -> int array array -> elt array
val set_index : mat -> int array array -> elt array -> unit
val get_fancy : Owl_types.index list -> mat -> mat
val set_fancy : Owl_types.index list -> mat -> mat -> unit
val get_slice : int list list -> mat -> mat
val set_slice : int list list -> mat -> mat -> unit
val row : mat -> int -> mat
val col : mat -> int -> mat
val rows : mat -> int array -> mat
val cols : mat -> int array -> mat
val resize : ?head:bool -> mat -> int array -> mat
val reshape : mat -> int array -> mat
val flatten : mat -> mat
val reverse : mat -> mat
val flip : ?axis:int -> mat -> mat
val rotate : mat -> int -> mat
val reset : mat -> unit
val fill : mat -> elt -> unit
val copy : mat -> mat
val copy_row_to : mat -> mat -> int -> unit
val copy_col_to : mat -> mat -> int -> unit
val concat_vertical : mat -> mat -> mat
val concat_horizontal : mat -> mat -> mat
val concat_vh : mat array array -> mat
val concatenate : ?axis:int -> mat array -> mat
val split : ?axis:int -> int array -> mat -> mat array
val split_vh : (int * int) array array -> mat -> mat array array
val transpose : mat -> mat
val ctranspose : mat -> mat
val diag : ?k:int -> mat -> mat
val swap_rows : mat -> int -> int -> unit
val swap_cols : mat -> int -> int -> unit
val tile : mat -> int array -> mat
val repeat : mat -> int array -> mat
val pad : ?v:elt -> int list list -> mat -> mat
val dropout : ?rate:float -> mat -> mat
val top : mat -> int -> int array array
val bottom : mat -> int -> int array array
val sort : mat -> mat
val argsort : mat -> (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
Iterate elements, columns, and rows.
val iteri : (int -> elt -> unit) -> mat -> unit
val iter : (elt -> unit) -> mat -> unit
val mapi : (int -> elt -> elt) -> mat -> mat
val map : (elt -> elt) -> mat -> mat
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> mat -> mat
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> mat -> mat
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> mat -> mat
val scan : ?axis:int -> (elt -> elt -> elt) -> mat -> mat
val filteri : (int -> elt -> bool) -> mat -> int array
val filter : (elt -> bool) -> mat -> int array
val iteri_2d : (int -> int -> elt -> unit) -> mat -> unit
val mapi_2d : (int -> int -> elt -> elt) -> mat -> mat
val foldi_2d : ?axis:int -> (int -> int -> elt -> elt -> elt) -> elt -> mat -> mat
val scani_2d : ?axis:int -> (int -> int -> elt -> elt -> elt) -> mat -> mat
val filteri_2d : (int -> int -> elt -> bool) -> mat -> (int * int) array
val iter2i_2d : (int -> int -> elt -> elt -> unit) -> mat -> mat -> unit
val map2i_2d : (int -> int -> elt -> elt -> elt) -> mat -> mat -> mat
val iter2i : (int -> elt -> elt -> unit) -> mat -> mat -> unit
val iter2 : (elt -> elt -> unit) -> mat -> mat -> unit
val map2i : (int -> elt -> elt -> elt) -> mat -> mat -> mat
val map2 : (elt -> elt -> elt) -> mat -> mat -> mat
val iteri_rows : (int -> mat -> unit) -> mat -> unit
val iter_rows : (mat -> unit) -> mat -> unit
val iter2i_rows : (int -> mat -> mat -> unit) -> mat -> mat -> unit
val iter2_rows : (mat -> mat -> unit) -> mat -> mat -> unit
val iteri_cols : (int -> mat -> unit) -> mat -> unit
val iter_cols : (mat -> unit) -> mat -> unit
val filteri_rows : (int -> mat -> bool) -> mat -> int array
val filter_rows : (mat -> bool) -> mat -> int array
val filteri_cols : (int -> mat -> bool) -> mat -> int array
val filter_cols : (mat -> bool) -> mat -> int array
val fold_rows : ('a -> mat -> 'a) -> 'a -> mat -> 'a
val fold_cols : ('a -> mat -> 'a) -> 'a -> mat -> 'a
val mapi_rows : (int -> mat -> 'a) -> mat -> 'a array
val map_rows : (mat -> 'a) -> mat -> 'a array
val mapi_cols : (int -> mat -> 'a) -> mat -> 'a array
val map_cols : (mat -> 'a) -> mat -> 'a array
val mapi_by_row : int -> (int -> mat -> mat) -> mat -> mat
val map_by_row : int -> (mat -> mat) -> mat -> mat
val mapi_by_col : int -> (int -> mat -> mat) -> mat -> mat
val map_by_col : int -> (mat -> mat) -> mat -> mat
val mapi_at_row : (int -> elt -> elt) -> mat -> int -> mat
val map_at_row : (elt -> elt) -> mat -> int -> mat
val mapi_at_col : (int -> elt -> elt) -> mat -> int -> mat
val map_at_col : (elt -> elt) -> mat -> int -> mat
Examine elements and compare two matrices
val exists : (elt -> bool) -> mat -> bool
val not_exists : (elt -> bool) -> mat -> bool
val for_all : (elt -> bool) -> mat -> bool
val is_zero : mat -> bool
val is_positive : mat -> bool
val is_negative : mat -> bool
val is_nonpositive : mat -> bool
val is_nonnegative : mat -> bool
val is_normal : mat -> bool
val not_nan : mat -> bool
val not_inf : mat -> bool
val equal : mat -> mat -> bool
val not_equal : mat -> mat -> bool
val greater : mat -> mat -> bool
val less : mat -> mat -> bool
val greater_equal : mat -> mat -> bool
val less_equal : mat -> mat -> bool
val elt_equal : mat -> mat -> mat
val elt_not_equal : mat -> mat -> mat
val elt_less : mat -> mat -> mat
val elt_greater : mat -> mat -> mat
val elt_less_equal : mat -> mat -> mat
val elt_greater_equal : mat -> mat -> mat
val equal_scalar : mat -> elt -> bool
val not_equal_scalar : mat -> elt -> bool
val less_scalar : mat -> elt -> bool
val greater_scalar : mat -> elt -> bool
val less_equal_scalar : mat -> elt -> bool
val greater_equal_scalar : mat -> elt -> bool
val elt_equal_scalar : mat -> elt -> mat
val elt_not_equal_scalar : mat -> elt -> mat
val elt_less_scalar : mat -> elt -> mat
val elt_greater_scalar : mat -> elt -> mat
val elt_less_equal_scalar : mat -> elt -> mat
val elt_greater_equal_scalar : mat -> elt -> mat
val approx_equal : ?eps:float -> mat -> mat -> bool
val approx_equal_scalar : ?eps:float -> mat -> elt -> bool
val approx_elt_equal : ?eps:float -> mat -> mat -> mat
val approx_elt_equal_scalar : ?eps:float -> mat -> elt -> mat
Randomisation functions
val draw_rows : ?replacement:bool -> mat -> int -> mat * int array
val draw_cols : ?replacement:bool -> mat -> int -> mat * int array
val draw_rows2 : ?replacement:bool -> mat -> mat -> int -> mat * mat * int array
val draw_cols2 : ?replacement:bool -> mat -> mat -> int -> mat * mat * int array
val shuffle_rows : mat -> mat
val shuffle_cols : mat -> mat
val shuffle : mat -> mat
Input/Output and helper functions
val to_array : mat -> elt array
val of_array : elt array -> int -> int -> mat
val to_arrays : mat -> elt array array
val of_arrays : elt array array -> mat
val to_rows : mat -> mat array
val of_rows : mat array -> mat
val to_cols : mat -> mat array
val of_cols : mat array -> mat
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> mat -> unit
val save : out:string -> mat -> unit
val load : string -> mat
val save_txt : ?sep:string -> ?append:bool -> out:string -> mat -> unit
val load_txt : ?sep:string -> string -> mat
val save_npy : out:string -> mat -> unit
val load_npy : string -> mat
Unary mathematical operations
val min : ?axis:int -> ?keep_dims:bool -> mat -> mat
val min' : mat -> elt
val max : ?axis:int -> ?keep_dims:bool -> mat -> mat
val max' : mat -> elt
val minmax : ?axis:int -> ?keep_dims:bool -> mat -> mat * mat
val minmax' : mat -> elt * elt
val min_i : mat -> elt * int array
val max_i : mat -> elt * int array
val minmax_i : mat -> (elt * int array) * (elt * int array)
val trace : mat -> elt
val sum : ?axis:int -> ?keep_dims:bool -> mat -> mat
val sum' : mat -> elt
val prod : ?axis:int -> ?keep_dims:bool -> mat -> mat
val prod' : mat -> elt
val mean : ?axis:int -> ?keep_dims:bool -> mat -> mat
val mean' : mat -> elt
val var' : mat -> elt
val std' : mat -> elt
val sem : ?axis:int -> ?keep_dims:bool -> mat -> mat
val sem' : mat -> elt
val sum_rows : ?keep_dims:bool -> mat -> mat
val sum_cols : ?keep_dims:bool -> mat -> mat
val mean_rows : ?keep_dims:bool -> mat -> mat
val mean_cols : ?keep_dims:bool -> mat -> mat
val abs : mat -> mat
val abs2 : mat -> mat
val conj : mat -> mat
val neg : mat -> mat
val reci : mat -> mat
val reci_tol : ?tol:elt -> mat -> mat
val sqr : mat -> mat
val sqrt : mat -> mat
val cbrt : mat -> mat
val exp : mat -> mat
val exp2 : mat -> mat
val exp10 : mat -> mat
val expm1 : mat -> mat
val log : mat -> mat
val log10 : mat -> mat
val log2 : mat -> mat
val log1p : mat -> mat
val sin : mat -> mat
val cos : mat -> mat
val tan : mat -> mat
val asin : mat -> mat
val acos : mat -> mat
val atan : mat -> mat
val sinh : mat -> mat
val cosh : mat -> mat
val tanh : mat -> mat
val asinh : mat -> mat
val acosh : mat -> mat
val atanh : mat -> mat
val floor : mat -> mat
val ceil : mat -> mat
val round : mat -> mat
val trunc : mat -> mat
val fix : mat -> mat
val modf : mat -> mat * mat
val l1norm : ?axis:int -> ?keep_dims:bool -> mat -> mat
val l1norm' : mat -> elt
val l2norm : ?axis:int -> ?keep_dims:bool -> mat -> mat
val l2norm' : mat -> elt
val l2norm_sqr : ?axis:int -> ?keep_dims:bool -> mat -> mat
val l2norm_sqr' : mat -> elt
val vecnorm : ?axis:int -> ?p:float -> ?keep_dims:bool -> mat -> mat
val vecnorm' : ?p:float -> mat -> elt
val cumsum : ?axis:int -> mat -> mat
val cumprod : ?axis:int -> mat -> mat
val cummin : ?axis:int -> mat -> mat
val cummax : ?axis:int -> mat -> mat
val diff : ?axis:int -> ?n:int -> mat -> mat
val var : ?axis:int -> ?keep_dims:bool -> mat -> mat
val std : ?axis:int -> ?keep_dims:bool -> mat -> mat
val mat2gray : ?amin:elt -> ?amax:elt -> mat -> mat
val lgamma : mat -> mat
val dawsn : mat -> mat
Binary mathematical operations
val add : mat -> mat -> mat
val sub : mat -> mat -> mat
val mul : mat -> mat -> mat
val div : mat -> mat -> mat
val add_scalar : mat -> elt -> mat
val sub_scalar : mat -> elt -> mat
val mul_scalar : mat -> elt -> mat
val div_scalar : mat -> elt -> mat
val scalar_add : elt -> mat -> mat
val scalar_sub : elt -> mat -> mat
val scalar_mul : elt -> mat -> mat
val scalar_div : elt -> mat -> mat
val dot : mat -> mat -> mat
val add_diag : mat -> elt -> mat
val pow : mat -> mat -> mat
val scalar_pow : elt -> mat -> mat
val pow_scalar : mat -> elt -> mat
val min2 : mat -> mat -> mat
val max2 : mat -> mat -> mat
val ssqr' : mat -> elt -> elt
val ssqr_diff' : mat -> mat -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> mat -> mat
val cov : ?b:mat -> a:mat -> mat
val kron : mat -> mat -> mat
val fma : mat -> mat -> mat -> mat
Functions of in-place modification
val create_ : out:mat -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:mat -> unit
val bernoulli_ : ?p:float -> out:mat -> unit
val zeros_ : out:mat -> unit
val ones_ : out:mat -> unit
val sort_ : mat -> unit
val one_hot_ : out:mat -> int -> mat -> unit
val copy_ : out:mat -> mat -> unit
val reshape_ : out:mat -> mat -> unit
val transpose_ : out:mat -> ?axis:int array -> mat -> unit
val sum_ : out:mat -> axis:int -> mat -> unit
val min_ : out:mat -> axis:int -> mat -> unit
val max_ : out:mat -> axis:int -> mat -> unit
val add_ : ?out:mat -> mat -> mat -> unit
val sub_ : ?out:mat -> mat -> mat -> unit
val mul_ : ?out:mat -> mat -> mat -> unit
val div_ : ?out:mat -> mat -> mat -> unit
val pow_ : ?out:mat -> mat -> mat -> unit
val atan2_ : ?out:mat -> mat -> mat -> unit
val hypot_ : ?out:mat -> mat -> mat -> unit
val fmod_ : ?out:mat -> mat -> mat -> unit
val min2_ : ?out:mat -> mat -> mat -> unit
val max2_ : ?out:mat -> mat -> mat -> unit
val add_scalar_ : ?out:mat -> mat -> elt -> unit
val sub_scalar_ : ?out:mat -> mat -> elt -> unit
val mul_scalar_ : ?out:mat -> mat -> elt -> unit
val div_scalar_ : ?out:mat -> mat -> elt -> unit
val pow_scalar_ : ?out:mat -> mat -> elt -> unit
val atan2_scalar_ : ?out:mat -> mat -> elt -> unit
val fmod_scalar_ : ?out:mat -> mat -> elt -> unit
val scalar_add_ : ?out:mat -> elt -> mat -> unit
val scalar_sub_ : ?out:mat -> elt -> mat -> unit
val scalar_mul_ : ?out:mat -> elt -> mat -> unit
val scalar_div_ : ?out:mat -> elt -> mat -> unit
val scalar_pow_ : ?out:mat -> elt -> mat -> unit
val scalar_atan2_ : ?out:mat -> elt -> mat -> unit
val scalar_fmod_ : ?out:mat -> elt -> mat -> unit
val fma_ : ?out:mat -> mat -> mat -> mat -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:mat -> mat -> mat -> unit
val conj_ : ?out:mat -> mat -> unit
val abs_ : ?out:mat -> mat -> unit
val neg_ : ?out:mat -> mat -> unit
val reci_ : ?out:mat -> mat -> unit
val signum_ : ?out:mat -> mat -> unit
val sqr_ : ?out:mat -> mat -> unit
val sqrt_ : ?out:mat -> mat -> unit
val cbrt_ : ?out:mat -> mat -> unit
val exp_ : ?out:mat -> mat -> unit
val exp2_ : ?out:mat -> mat -> unit
val exp10_ : ?out:mat -> mat -> unit
val expm1_ : ?out:mat -> mat -> unit
val log_ : ?out:mat -> mat -> unit
val log2_ : ?out:mat -> mat -> unit
val log10_ : ?out:mat -> mat -> unit
val log1p_ : ?out:mat -> mat -> unit
val sin_ : ?out:mat -> mat -> unit
val cos_ : ?out:mat -> mat -> unit
val tan_ : ?out:mat -> mat -> unit
val asin_ : ?out:mat -> mat -> unit
val acos_ : ?out:mat -> mat -> unit
val atan_ : ?out:mat -> mat -> unit
val sinh_ : ?out:mat -> mat -> unit
val cosh_ : ?out:mat -> mat -> unit
val tanh_ : ?out:mat -> mat -> unit
val asinh_ : ?out:mat -> mat -> unit
val acosh_ : ?out:mat -> mat -> unit
val atanh_ : ?out:mat -> mat -> unit
val floor_ : ?out:mat -> mat -> unit
val ceil_ : ?out:mat -> mat -> unit
val round_ : ?out:mat -> mat -> unit
val trunc_ : ?out:mat -> mat -> unit
val fix_ : ?out:mat -> mat -> unit
val erf_ : ?out:mat -> mat -> unit
val erfc_ : ?out:mat -> mat -> unit
val relu_ : ?out:mat -> mat -> unit
val softplus_ : ?out:mat -> mat -> unit
val softsign_ : ?out:mat -> mat -> unit
val sigmoid_ : ?out:mat -> mat -> unit
val softmax_ : ?out:mat -> ?axis:int -> mat -> unit
val cumsum_ : ?out:mat -> ?axis:int -> mat -> unit
val cumprod_ : ?out:mat -> ?axis:int -> mat -> unit
val cummin_ : ?out:mat -> ?axis:int -> mat -> unit
val cummax_ : ?out:mat -> ?axis:int -> mat -> unit
val dropout_ : ?out:mat -> ?rate:float -> mat -> unit
val elt_equal_ : ?out:mat -> mat -> mat -> unit
val elt_not_equal_ : ?out:mat -> mat -> mat -> unit
val elt_less_ : ?out:mat -> mat -> mat -> unit
val elt_greater_ : ?out:mat -> mat -> mat -> unit
val elt_less_equal_ : ?out:mat -> mat -> mat -> unit
val elt_greater_equal_ : ?out:mat -> mat -> mat -> unit
val elt_equal_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_not_equal_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_less_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_greater_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_less_equal_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_greater_equal_scalar_ : ?out:mat -> mat -> elt -> unit
include Owl_dense_matrix_intf.Complex with type mat := mat and type cast_mat := cast_mat
Specific complex functions
val complex : cast_mat -> cast_mat -> mat
val polar : cast_mat -> cast_mat -> mat
val re : mat -> cast_mat
val im : mat -> cast_mat
include module type of struct include Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (-$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (*$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (/$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val ($+) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($-) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($*) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($/) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (!=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<>) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (>) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (>=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
include sig ... end
val (=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (!=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<>$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (>$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (>=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (!=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<>.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (>.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (>=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (!=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<>.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (>.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (>=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (=~) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (=~$) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (=~.) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=~.$) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (%) : (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val (%$) : (float, 'a) Owl_dense_matrix_generic.t -> float -> (float, 'a) Owl_dense_matrix_generic.t
val (**) : (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val ($**) : float -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val (**$) : (float, 'a) Owl_dense_matrix_generic.t -> float -> (float, 'a) Owl_dense_matrix_generic.t
val (+=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (-=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (*=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (/=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (+$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (-$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (*$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (/$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (@=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (@||) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (.!{;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_matrix_generic.t
val (.!{;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (.${}) : ('a, 'b) Owl_dense_matrix_generic.t -> int list -> ('a, 'b) Owl_dense_matrix_generic.t
val (.${;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> int list array -> ('a, 'b) Owl_dense_matrix_generic.t
val (.${}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int list -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (.${;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int list array -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
include sig ... end
val (*@) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (.%{}) : ('a, 'b) Owl_dense_matrix_generic.t -> (int * int) -> 'a
val (.%{;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> (int * int) -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int array -> 'a -> unit
include sig ... end
val (**@) : ('a, 'b) Owl_linalg_generic.t -> float -> ('a, 'b) Owl_linalg_generic.t
val (/@) : ('a, 'b) Owl_linalg_generic.t -> ('a, 'b) Owl_linalg_generic.t -> ('a, 'b) Owl_linalg_generic.t
val mpow : Owl_linalg_c.mat -> float -> Owl_linalg_c.mat
\ No newline at end of file
diff --git a/owl/Owl_dense_matrix/D/index.html b/owl/Owl_dense_matrix/D/index.html
deleted file mode 100644
index 0dfe0c3b3..000000000
--- a/owl/Owl_dense_matrix/D/index.html
+++ /dev/null
@@ -1,225 +0,0 @@
D (owl.Owl_dense_matrix.D)

Module Owl_dense_matrix.D

include module type of struct include Owl_dense_matrix_d end
type elt = float
type mat = (float, Stdlib.Bigarray.float64_elt) Owl_dense_matrix_generic.t
include Owl_dense_matrix_intf.Common with type elt := elt and type mat := mat
Create dense matrices
val empty : int -> int -> mat
val create : int -> int -> elt -> mat
val init : int -> int -> (int -> elt) -> mat
val init_2d : int -> int -> (int -> int -> elt) -> mat
val zeros : int -> int -> mat
val ones : int -> int -> mat
val eye : int -> mat
val sequential : ?a:elt -> ?step:elt -> int -> int -> mat
val uniform : ?a:elt -> ?b:elt -> int -> int -> mat
val gaussian : ?mu:elt -> ?sigma:elt -> int -> int -> mat
val bernoulli : ?p:float -> int -> int -> mat
val unit_basis : int -> int -> mat
val diagm : ?k:int -> mat -> mat
val triu : ?k:int -> mat -> mat
val tril : ?k:int -> mat -> mat
val symmetric : ?upper:bool -> mat -> mat
val bidiagonal : ?upper:bool -> mat -> mat -> mat
val toeplitz : ?c:mat -> mat -> mat
val hankel : ?r:mat -> mat -> mat
val hadamard : int -> mat
val magic : int -> mat
Dense row vectors and meshgrids
val vector : int -> mat
val vector_zeros : int -> mat
val vector_ones : int -> mat
val vector_uniform : int -> mat
val linspace : elt -> elt -> int -> mat
val logspace : ?base:float -> elt -> elt -> int -> mat
val meshgrid : elt -> elt -> elt -> elt -> int -> int -> mat * mat
val meshup : mat -> mat -> mat * mat
Obtain the basic properties of a matrix
val shape : mat -> int * int
val row_num : mat -> int
val col_num : mat -> int
val numel : mat -> int
val nnz : mat -> int
val density : mat -> float
val size_in_bytes : mat -> int
val same_shape : mat -> mat -> bool
val same_data : mat -> mat -> bool
Manipulate a matrix
val get : mat -> int -> int -> elt
val set : mat -> int -> int -> elt -> unit
val get_index : mat -> int array array -> elt array
val set_index : mat -> int array array -> elt array -> unit
val get_fancy : Owl_types.index list -> mat -> mat
val set_fancy : Owl_types.index list -> mat -> mat -> unit
val get_slice : int list list -> mat -> mat
val set_slice : int list list -> mat -> mat -> unit
val row : mat -> int -> mat
val col : mat -> int -> mat
val rows : mat -> int array -> mat
val cols : mat -> int array -> mat
val resize : ?head:bool -> mat -> int array -> mat
val reshape : mat -> int array -> mat
val flatten : mat -> mat
val reverse : mat -> mat
val flip : ?axis:int -> mat -> mat
val rotate : mat -> int -> mat
val reset : mat -> unit
val fill : mat -> elt -> unit
val copy : mat -> mat
val copy_row_to : mat -> mat -> int -> unit
val copy_col_to : mat -> mat -> int -> unit
val concat_vertical : mat -> mat -> mat
val concat_horizontal : mat -> mat -> mat
val concat_vh : mat array array -> mat
val concatenate : ?axis:int -> mat array -> mat
val split : ?axis:int -> int array -> mat -> mat array
val split_vh : (int * int) array array -> mat -> mat array array
val transpose : mat -> mat
val ctranspose : mat -> mat
val swap_rows : mat -> int -> int -> unit
val swap_cols : mat -> int -> int -> unit
val tile : mat -> int array -> mat
val repeat : mat -> int array -> mat
val pad : ?v:elt -> int list list -> mat -> mat
val dropout : ?rate:float -> mat -> mat
val top : mat -> int -> int array array
val bottom : mat -> int -> int array array
val sort : mat -> mat
val argsort : mat -> (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
Iterate elements, columns, and rows.
val iteri : (int -> elt -> unit) -> mat -> unit
val iter : (elt -> unit) -> mat -> unit
val mapi : (int -> elt -> elt) -> mat -> mat
val map : (elt -> elt) -> mat -> mat
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> mat -> mat
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> mat -> mat
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> mat -> mat
val scan : ?axis:int -> (elt -> elt -> elt) -> mat -> mat
val filteri : (int -> elt -> bool) -> mat -> int array
val filter : (elt -> bool) -> mat -> int array
val iteri_2d : (int -> int -> elt -> unit) -> mat -> unit
val mapi_2d : (int -> int -> elt -> elt) -> mat -> mat
val foldi_2d : ?axis:int -> (int -> int -> elt -> elt -> elt) -> elt -> mat -> mat
val scani_2d : ?axis:int -> (int -> int -> elt -> elt -> elt) -> mat -> mat
val filteri_2d : (int -> int -> elt -> bool) -> mat -> (int * int) array
val iter2i_2d : (int -> int -> elt -> elt -> unit) -> mat -> mat -> unit
val map2i_2d : (int -> int -> elt -> elt -> elt) -> mat -> mat -> mat
val iter2i : (int -> elt -> elt -> unit) -> mat -> mat -> unit
val iter2 : (elt -> elt -> unit) -> mat -> mat -> unit
val map2i : (int -> elt -> elt -> elt) -> mat -> mat -> mat
val map2 : (elt -> elt -> elt) -> mat -> mat -> mat
val iteri_rows : (int -> mat -> unit) -> mat -> unit
val iter_rows : (mat -> unit) -> mat -> unit
val iter2i_rows : (int -> mat -> mat -> unit) -> mat -> mat -> unit
val iter2_rows : (mat -> mat -> unit) -> mat -> mat -> unit
val iteri_cols : (int -> mat -> unit) -> mat -> unit
val iter_cols : (mat -> unit) -> mat -> unit
val filteri_rows : (int -> mat -> bool) -> mat -> int array
val filter_rows : (mat -> bool) -> mat -> int array
val filteri_cols : (int -> mat -> bool) -> mat -> int array
val filter_cols : (mat -> bool) -> mat -> int array
val fold_rows : ('a -> mat -> 'a) -> 'a -> mat -> 'a
val fold_cols : ('a -> mat -> 'a) -> 'a -> mat -> 'a
val mapi_rows : (int -> mat -> 'a) -> mat -> 'a array
val map_rows : (mat -> 'a) -> mat -> 'a array
val mapi_cols : (int -> mat -> 'a) -> mat -> 'a array
val map_cols : (mat -> 'a) -> mat -> 'a array
val mapi_by_row : int -> (int -> mat -> mat) -> mat -> mat
val map_by_row : int -> (mat -> mat) -> mat -> mat
val mapi_by_col : int -> (int -> mat -> mat) -> mat -> mat
val map_by_col : int -> (mat -> mat) -> mat -> mat
val mapi_at_row : (int -> elt -> elt) -> mat -> int -> mat
val map_at_row : (elt -> elt) -> mat -> int -> mat
val mapi_at_col : (int -> elt -> elt) -> mat -> int -> mat
val map_at_col : (elt -> elt) -> mat -> int -> mat
Examine elements and compare two matrices
val exists : (elt -> bool) -> mat -> bool
val not_exists : (elt -> bool) -> mat -> bool
val for_all : (elt -> bool) -> mat -> bool
val is_zero : mat -> bool
val is_positive : mat -> bool
val is_negative : mat -> bool
val is_nonpositive : mat -> bool
val is_nonnegative : mat -> bool
val is_normal : mat -> bool
val not_nan : mat -> bool
val not_inf : mat -> bool
val equal : mat -> mat -> bool
val not_equal : mat -> mat -> bool
val greater : mat -> mat -> bool
val less : mat -> mat -> bool
val greater_equal : mat -> mat -> bool
val less_equal : mat -> mat -> bool
val elt_equal : mat -> mat -> mat
val elt_not_equal : mat -> mat -> mat
val elt_less : mat -> mat -> mat
val elt_greater : mat -> mat -> mat
val elt_less_equal : mat -> mat -> mat
val elt_greater_equal : mat -> mat -> mat
val equal_scalar : mat -> elt -> bool
val not_equal_scalar : mat -> elt -> bool
val less_scalar : mat -> elt -> bool
val greater_scalar : mat -> elt -> bool
val less_equal_scalar : mat -> elt -> bool
val greater_equal_scalar : mat -> elt -> bool
val elt_equal_scalar : mat -> elt -> mat
val elt_not_equal_scalar : mat -> elt -> mat
val elt_less_scalar : mat -> elt -> mat
val elt_greater_scalar : mat -> elt -> mat
val elt_less_equal_scalar : mat -> elt -> mat
val elt_greater_equal_scalar : mat -> elt -> mat
val approx_equal : ?eps:float -> mat -> mat -> bool
val approx_equal_scalar : ?eps:float -> mat -> elt -> bool
val approx_elt_equal : ?eps:float -> mat -> mat -> mat
val approx_elt_equal_scalar : ?eps:float -> mat -> elt -> mat
Randomisation functions
val draw_rows : ?replacement:bool -> mat -> int -> mat * int array
val draw_cols : ?replacement:bool -> mat -> int -> mat * int array
val draw_rows2 : ?replacement:bool -> mat -> mat -> int -> mat * mat * int array
val draw_cols2 : ?replacement:bool -> mat -> mat -> int -> mat * mat * int array
val shuffle_rows : mat -> mat
val shuffle_cols : mat -> mat
val shuffle : mat -> mat
Input/Output and helper functions
val to_array : mat -> elt array
val of_array : elt array -> int -> int -> mat
val to_arrays : mat -> elt array array
val of_arrays : elt array array -> mat
val to_rows : mat -> mat array
val of_rows : mat array -> mat
val to_cols : mat -> mat array
val of_cols : mat array -> mat
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> mat -> unit
val save : out:string -> mat -> unit
val load : string -> mat
val save_txt : ?sep:string -> ?append:bool -> out:string -> mat -> unit
val load_txt : ?sep:string -> string -> mat
val save_npy : out:string -> mat -> unit
val load_npy : string -> mat
Unary mathematical operations
val min : ?axis:int -> ?keep_dims:bool -> mat -> mat
val min' : mat -> elt
val max : ?axis:int -> ?keep_dims:bool -> mat -> mat
val max' : mat -> elt
val minmax : ?axis:int -> ?keep_dims:bool -> mat -> mat * mat
val minmax' : mat -> elt * elt
val min_i : mat -> elt * int array
val max_i : mat -> elt * int array
val minmax_i : mat -> (elt * int array) * (elt * int array)
val trace : mat -> elt
val sum : ?axis:int -> ?keep_dims:bool -> mat -> mat
val sum' : mat -> elt
val prod : ?axis:int -> ?keep_dims:bool -> mat -> mat
val prod' : mat -> elt
val mean : ?axis:int -> ?keep_dims:bool -> mat -> mat
val mean' : mat -> elt
val var' : mat -> elt
val std' : mat -> elt
val sem : ?axis:int -> ?keep_dims:bool -> mat -> mat
val sem' : mat -> elt
val sum_rows : ?keep_dims:bool -> mat -> mat
val sum_cols : ?keep_dims:bool -> mat -> mat
val mean_rows : ?keep_dims:bool -> mat -> mat
val mean_cols : ?keep_dims:bool -> mat -> mat
val abs : mat -> mat
val abs2 : mat -> mat
val conj : mat -> mat
val neg : mat -> mat
val reci : mat -> mat
val reci_tol : ?tol:elt -> mat -> mat
val sqr : mat -> mat
val sqrt : mat -> mat
val cbrt : mat -> mat
val exp : mat -> mat
val exp2 : mat -> mat
val exp10 : mat -> mat
val expm1 : mat -> mat
val log : mat -> mat
val log10 : mat -> mat
val log2 : mat -> mat
val log1p : mat -> mat
val sin : mat -> mat
val cos : mat -> mat
val tan : mat -> mat
val asin : mat -> mat
val acos : mat -> mat
val atan : mat -> mat
val sinh : mat -> mat
val cosh : mat -> mat
val tanh : mat -> mat
val asinh : mat -> mat
val acosh : mat -> mat
val atanh : mat -> mat
val floor : mat -> mat
val ceil : mat -> mat
val round : mat -> mat
val trunc : mat -> mat
val fix : mat -> mat
val modf : mat -> mat * mat
val l1norm : ?axis:int -> ?keep_dims:bool -> mat -> mat
val l1norm' : mat -> elt
val l2norm : ?axis:int -> ?keep_dims:bool -> mat -> mat
val l2norm' : mat -> elt
val l2norm_sqr : ?axis:int -> ?keep_dims:bool -> mat -> mat
val l2norm_sqr' : mat -> elt
val vecnorm : ?axis:int -> ?p:float -> ?keep_dims:bool -> mat -> mat
val vecnorm' : ?p:float -> mat -> elt
val cumsum : ?axis:int -> mat -> mat
val cumprod : ?axis:int -> mat -> mat
val cummin : ?axis:int -> mat -> mat
val cummax : ?axis:int -> mat -> mat
val diff : ?axis:int -> ?n:int -> mat -> mat
val var : ?axis:int -> ?keep_dims:bool -> mat -> mat
val std : ?axis:int -> ?keep_dims:bool -> mat -> mat
val mat2gray : ?amin:elt -> ?amax:elt -> mat -> mat
val lgamma : mat -> mat
val dawsn : mat -> mat
Binary mathematical operations
val add : mat -> mat -> mat
val sub : mat -> mat -> mat
val mul : mat -> mat -> mat
val div : mat -> mat -> mat
val add_scalar : mat -> elt -> mat
val sub_scalar : mat -> elt -> mat
val mul_scalar : mat -> elt -> mat
val div_scalar : mat -> elt -> mat
val scalar_add : elt -> mat -> mat
val scalar_sub : elt -> mat -> mat
val scalar_mul : elt -> mat -> mat
val scalar_div : elt -> mat -> mat
val dot : mat -> mat -> mat
val add_diag : mat -> elt -> mat
val pow : mat -> mat -> mat
val scalar_pow : elt -> mat -> mat
val pow_scalar : mat -> elt -> mat
val min2 : mat -> mat -> mat
val max2 : mat -> mat -> mat
val ssqr' : mat -> elt -> elt
val ssqr_diff' : mat -> mat -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> mat -> mat
val cov : ?b:mat -> a:mat -> mat
val kron : mat -> mat -> mat
val fma : mat -> mat -> mat -> mat
Functions of in-place modification
val create_ : out:mat -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:mat -> unit
val bernoulli_ : ?p:float -> out:mat -> unit
val zeros_ : out:mat -> unit
val ones_ : out:mat -> unit
val sort_ : mat -> unit
val one_hot_ : out:mat -> int -> mat -> unit
val copy_ : out:mat -> mat -> unit
val reshape_ : out:mat -> mat -> unit
val transpose_ : out:mat -> ?axis:int array -> mat -> unit
val sum_ : out:mat -> axis:int -> mat -> unit
val min_ : out:mat -> axis:int -> mat -> unit
val max_ : out:mat -> axis:int -> mat -> unit
val add_ : ?out:mat -> mat -> mat -> unit
val sub_ : ?out:mat -> mat -> mat -> unit
val mul_ : ?out:mat -> mat -> mat -> unit
val div_ : ?out:mat -> mat -> mat -> unit
val pow_ : ?out:mat -> mat -> mat -> unit
val atan2_ : ?out:mat -> mat -> mat -> unit
val hypot_ : ?out:mat -> mat -> mat -> unit
val fmod_ : ?out:mat -> mat -> mat -> unit
val min2_ : ?out:mat -> mat -> mat -> unit
val max2_ : ?out:mat -> mat -> mat -> unit
val add_scalar_ : ?out:mat -> mat -> elt -> unit
val sub_scalar_ : ?out:mat -> mat -> elt -> unit
val mul_scalar_ : ?out:mat -> mat -> elt -> unit
val div_scalar_ : ?out:mat -> mat -> elt -> unit
val pow_scalar_ : ?out:mat -> mat -> elt -> unit
val atan2_scalar_ : ?out:mat -> mat -> elt -> unit
val fmod_scalar_ : ?out:mat -> mat -> elt -> unit
val scalar_add_ : ?out:mat -> elt -> mat -> unit
val scalar_sub_ : ?out:mat -> elt -> mat -> unit
val scalar_mul_ : ?out:mat -> elt -> mat -> unit
val scalar_div_ : ?out:mat -> elt -> mat -> unit
val scalar_pow_ : ?out:mat -> elt -> mat -> unit
val scalar_atan2_ : ?out:mat -> elt -> mat -> unit
val scalar_fmod_ : ?out:mat -> elt -> mat -> unit
val fma_ : ?out:mat -> mat -> mat -> mat -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:mat -> mat -> mat -> unit
val conj_ : ?out:mat -> mat -> unit
val abs_ : ?out:mat -> mat -> unit
val neg_ : ?out:mat -> mat -> unit
val reci_ : ?out:mat -> mat -> unit
val signum_ : ?out:mat -> mat -> unit
val sqr_ : ?out:mat -> mat -> unit
val sqrt_ : ?out:mat -> mat -> unit
val cbrt_ : ?out:mat -> mat -> unit
val exp_ : ?out:mat -> mat -> unit
val exp2_ : ?out:mat -> mat -> unit
val exp10_ : ?out:mat -> mat -> unit
val expm1_ : ?out:mat -> mat -> unit
val log_ : ?out:mat -> mat -> unit
val log2_ : ?out:mat -> mat -> unit
val log10_ : ?out:mat -> mat -> unit
val log1p_ : ?out:mat -> mat -> unit
val sin_ : ?out:mat -> mat -> unit
val cos_ : ?out:mat -> mat -> unit
val tan_ : ?out:mat -> mat -> unit
val asin_ : ?out:mat -> mat -> unit
val acos_ : ?out:mat -> mat -> unit
val atan_ : ?out:mat -> mat -> unit
val sinh_ : ?out:mat -> mat -> unit
val cosh_ : ?out:mat -> mat -> unit
val tanh_ : ?out:mat -> mat -> unit
val asinh_ : ?out:mat -> mat -> unit
val acosh_ : ?out:mat -> mat -> unit
val atanh_ : ?out:mat -> mat -> unit
val floor_ : ?out:mat -> mat -> unit
val ceil_ : ?out:mat -> mat -> unit
val round_ : ?out:mat -> mat -> unit
val trunc_ : ?out:mat -> mat -> unit
val fix_ : ?out:mat -> mat -> unit
val erf_ : ?out:mat -> mat -> unit
val erfc_ : ?out:mat -> mat -> unit
val relu_ : ?out:mat -> mat -> unit
val softplus_ : ?out:mat -> mat -> unit
val softsign_ : ?out:mat -> mat -> unit
val sigmoid_ : ?out:mat -> mat -> unit
val softmax_ : ?out:mat -> ?axis:int -> mat -> unit
val cumsum_ : ?out:mat -> ?axis:int -> mat -> unit
val cumprod_ : ?out:mat -> ?axis:int -> mat -> unit
val cummin_ : ?out:mat -> ?axis:int -> mat -> unit
val cummax_ : ?out:mat -> ?axis:int -> mat -> unit
val dropout_ : ?out:mat -> ?rate:float -> mat -> unit
val elt_equal_ : ?out:mat -> mat -> mat -> unit
val elt_not_equal_ : ?out:mat -> mat -> mat -> unit
val elt_less_ : ?out:mat -> mat -> mat -> unit
val elt_greater_ : ?out:mat -> mat -> mat -> unit
val elt_less_equal_ : ?out:mat -> mat -> mat -> unit
val elt_greater_equal_ : ?out:mat -> mat -> mat -> unit
val elt_equal_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_not_equal_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_less_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_greater_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_less_equal_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_greater_equal_scalar_ : ?out:mat -> mat -> elt -> unit
include Owl_dense_matrix_intf.Real with type elt := elt and type mat := mat
Specific real functions
val i0 : mat -> mat
val i0e : mat -> mat
val i1 : mat -> mat
val i1e : mat -> mat
val iv : v:mat -> mat -> mat
val scalar_iv : v:elt -> mat -> mat
val iv_scalar : v:mat -> elt -> mat
val j0 : mat -> mat
val j1 : mat -> mat
val jv : v:mat -> mat -> mat
val scalar_jv : v:elt -> mat -> mat
val jv_scalar : v:mat -> elt -> mat
val semidef : int -> mat
val min_rows : mat -> (elt * int * int) array
val min_cols : mat -> (elt * int * int) array
val max_rows : mat -> (elt * int * int) array
val max_cols : mat -> (elt * int * int) array
val signum : mat -> mat
val erf : mat -> mat
val erfc : mat -> mat
val logistic : mat -> mat
val relu : mat -> mat
val elu : ?alpha:elt -> mat -> mat
val leaky_relu : ?alpha:elt -> mat -> mat
val softplus : mat -> mat
val softsign : mat -> mat
val softmax : ?axis:int -> mat -> mat
val sigmoid : mat -> mat
val log_sum_exp' : mat -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> mat -> mat
val max_pool : ?padding:Owl_types.padding -> mat -> int array -> int array -> mat
val avg_pool : ?padding:Owl_types.padding -> mat -> int array -> int array -> mat
val atan2 : mat -> mat -> mat
val scalar_atan2 : elt -> mat -> mat
val atan2_scalar : mat -> elt -> mat
val hypot : mat -> mat -> mat
val fmod : mat -> mat -> mat
val fmod_scalar : mat -> elt -> mat
val scalar_fmod : elt -> mat -> mat
val cross_entropy' : mat -> mat -> elt
val clip_by_l2norm : elt -> mat -> mat
val poisson : mu:elt -> int -> int -> mat
include module type of struct include Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (-$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (*$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (/$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val ($+) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($-) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($*) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($/) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (!=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<>) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (>) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (>=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
include sig ... end
val (=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (!=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<>$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (>$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (>=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (!=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<>.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (>.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (>=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (!=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<>.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (>.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (>=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (=~) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (=~$) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (=~.) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=~.$) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (%) : (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val (%$) : (float, 'a) Owl_dense_matrix_generic.t -> float -> (float, 'a) Owl_dense_matrix_generic.t
val (**) : (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val ($**) : float -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val (**$) : (float, 'a) Owl_dense_matrix_generic.t -> float -> (float, 'a) Owl_dense_matrix_generic.t
val (+=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (-=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (*=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (/=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (+$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (-$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (*$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (/$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (@=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (@||) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (.!{;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_matrix_generic.t
val (.!{;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (.${}) : ('a, 'b) Owl_dense_matrix_generic.t -> int list -> ('a, 'b) Owl_dense_matrix_generic.t
val (.${;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> int list array -> ('a, 'b) Owl_dense_matrix_generic.t
val (.${}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int list -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (.${;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int list array -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
include sig ... end
val (*@) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (.%{}) : ('a, 'b) Owl_dense_matrix_generic.t -> (int * int) -> 'a
val (.%{;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> (int * int) -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int array -> 'a -> unit
include sig ... end
val (**@) : ('a, 'b) Owl_linalg_generic.t -> float -> ('a, 'b) Owl_linalg_generic.t
val (/@) : ('a, 'b) Owl_linalg_generic.t -> ('a, 'b) Owl_linalg_generic.t -> ('a, 'b) Owl_linalg_generic.t
val mpow : Owl_linalg_d.mat -> float -> Owl_linalg_d.mat
val diag : ?k:int -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
\ No newline at end of file
diff --git a/owl/Owl_dense_matrix/Generic/index.html b/owl/Owl_dense_matrix/Generic/index.html
deleted file mode 100644
index 891d5ab4a..000000000
--- a/owl/Owl_dense_matrix/Generic/index.html
+++ /dev/null
@@ -1,402 +0,0 @@
Generic (owl.Owl_dense_matrix.Generic)

Module Owl_dense_matrix.Generic

include module type of struct include Owl_dense_matrix_generic end

About the comparison of two complex numbers x and y, Owl uses the following conventions: 1) x and y are equal iff both the real and imaginary parts are equal; 2) x is less than y if the magnitude of x is less than the magnitude of y; in case x and y have the same magnitude, x is less than y if the phase of x is less than the phase of y; 3) the less-or-equal, greater, and greater-or-equal relations are defined on top of the aforementioned conventions.
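As an illustration only (not part of the Owl API), the convention above can be expressed against Stdlib.Complex as a small OCaml sketch:

let complex_less x y =
  let open Stdlib.Complex in
  let nx, ny = norm x, norm y in
  if nx <> ny then nx < ny   (* rule 2: compare magnitudes first *)
  else arg x < arg y         (* tie-break on the phase angle *)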

The generic module supports operations for the following Bigarray element types: Int8_signed, Int8_unsigned, Int16_signed, Int16_unsigned, Int32, Int64, Float32, Float64, Complex32, Complex64.

Type definition
type ('a, 'b) t = ('a, 'b, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t

N-dimensional array type, i.e. Bigarray Genarray type.

Create matrices
val empty : ('a, 'b) Owl_dense_ndarray_generic.kind -> int -> int -> ('a, 'b) t

empty m n creates an m by n matrix without initialising the values of elements in x.

val create : ('a, 'b) Owl_dense_ndarray_generic.kind -> int -> int -> 'a -> ('a, 'b) t

create m n a creates an m by n matrix and all the elements of x are initialised with the value a.

val init : ('a, 'b) Owl_dense_ndarray_generic.kind -> int -> int -> (int -> 'a) -> ('a, 'b) t

init m n f creates a matrix x of shape m x n, using f to initialise the elements in x. The input of f is the 1-dimensional index of the matrix. You need to convert it explicitly if you need the 2D index; the function Owl_utils.ind can help with that.

val init_2d : ('a, 'b) Owl_dense_ndarray_generic.kind -> int -> int -> (int -> int -> 'a) -> ('a, 'b) t

init_2d m n f is almost the same as init, but f receives the 2D index as input. It is more convenient since you don't have to convert the index yourself, but this also means init_2d is slower than init.
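For example, a minimal sketch assuming Owl is installed, the generic module is aliased as M, and the standard Bigarray.float64 kind is used:

module M = Owl.Dense.Matrix.Generic
let a = M.init Bigarray.float64 2 3 (fun i -> float_of_int i)
(* element (i,j) of a equals its 1-D index, i.e. 3*i + j *)
let b = M.init_2d Bigarray.float64 2 3 (fun i j -> float_of_int (10 * i + j))
(* element (i,j) of b equals 10*i + j, computed from the 2-D index directly *)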

val zeros : ('a, 'b) Owl_dense_ndarray_generic.kind -> int -> int -> ('a, 'b) t

zeros m n creates an m by n matrix where all the elements are initialised to zeros.

val ones : ('a, 'b) Owl_dense_ndarray_generic.kind -> int -> int -> ('a, 'b) t

ones m n creates an m by n matrix where all the elements are ones.

val eye : ('a, 'b) Owl_dense_ndarray_generic.kind -> int -> ('a, 'b) t

eye m creates an m by m identity matrix.

val complex : ('a, 'b) Owl_dense_ndarray_generic.kind -> ('c, 'd) Owl_dense_ndarray_generic.kind -> ('a, 'b) t -> ('a, 'b) t -> ('c, 'd) t

complex re im constructs a complex ndarray/matrix from re and im. re and im contain the real and imaginary part of x respectively.

Note that both re and im can be complex but must have same type. The real part of re will be the real part of x and the imaginary part of im will be the imaginary part of x.
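A hedged usage sketch with the single-precision instances (Owl.Dense.Matrix.S for the float parts, Owl.Dense.Matrix.C for the complex result):

module S = Owl.Dense.Matrix.S
module C = Owl.Dense.Matrix.C
let re = S.ones 2 2
let im = S.zeros 2 2
let z = C.complex re im   (* 2x2 complex matrix with zero imaginary parts *)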

val polar : ('a, 'b) Owl_dense_ndarray_generic.kind -> ('c, 'd) Owl_dense_ndarray_generic.kind -> ('a, 'b) t -> ('a, 'b) t -> ('c, 'd) t

polar rho theta constructs a complex ndarray/matrix from polar coordinates rho and theta. rho contains the magnitudes and theta contains the phase angles. Note that the behaviour is undefined if rho has negative elements or theta has infinite elements.

val unit_basis : ('a, 'b) Owl_dense_ndarray_generic.kind -> int -> int -> ('a, 'b) t

unit_basis k n i returns a unit basis vector with ith element set to 1.

val sequential : ('a, 'b) Owl_dense_ndarray_generic.kind -> ?a:'a -> ?step:'a -> int -> int -> ('a, 'b) t

sequential ~a ~step m n creates an m by n matrix. The elements in x are initialised sequentially, starting from ~a and increasing by ~step.

The default value of ~a is zero whilst the default value of ~step is one.

val uniform : ('a, 'b) Owl_dense_ndarray_generic.kind -> ?a:'a -> ?b:'a -> int -> int -> ('a, 'b) t

uniform m n creates an m by n matrix where all the elements follow a uniform distribution on the (0,1) interval. uniform ~a ~b m n adjusts the interval to (a,b).
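For instance, a minimal sketch with the float64 instance, using the ~a/~b bounds shown in the signature above:

let x = Owl.Dense.Matrix.D.uniform ~a:0. ~b:10. 3 4   (* 3x4 matrix, entries drawn from (0, 10) *)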

val gaussian : ('a, 'b) Owl_dense_ndarray_generic.kind -> ?mu:'a -> ?sigma:'a -> int -> int -> ('a, 'b) t

gaussian m n creates an m by n matrix where all the elements in x follow a Gaussian distribution with specified sigma. By default sigma = 1.

val poisson : ('a, 'b) Owl_dense_ndarray_generic.kind -> mu:float -> int -> int -> ('a, 'b) t

poisson m n creates an m by n matrix where all the elements in x follow a Poisson distribution with specified rate mu.

val semidef : (float, 'b) Owl_dense_ndarray_generic.kind -> int -> (float, 'b) t

semidef n returns a random n by n positive semi-definite matrix.

val linspace : ('a, 'b) Owl_dense_ndarray_generic.kind -> 'a -> 'a -> int -> ('a, 'b) t

linspace a b n linearly divides the interval [a,b] into n pieces and returns them as a 1 by n row vector. E.g., linspace 0. 5. 6 will create a row vector [0;1;2;3;4;5].

val logspace : ('a, 'b) Owl_dense_ndarray_generic.kind -> ?base:float -> 'a -> 'a -> int -> ('a, 'b) t

logspace base a b n ... the default value of base is e.

val meshgrid : ('a, 'b) Owl_dense_ndarray_generic.kind -> 'a -> 'a -> 'a -> 'a -> int -> int -> ('a, 'b) t * ('a, 'b) t

meshgrid a1 b1 a2 b2 n1 n2 is similar to the meshgrid function in Matlab. It returns two matrices x and y where the row vectors in x are linearly spaced between [a1,b1] by n1 whilst the column vectors in y are linearly spaced between (a2,b2) by n2.
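A short sketch with the float64 instance, which drops the kind argument (the grid uses 3 points over [0,1] and 5 points over [0,2]):

let x, y = Owl.Dense.Matrix.D.meshgrid 0. 1. 0. 2. 3 5
(* x and y together describe the grid points, as explained above *)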

val meshup : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t

meshup x y creates mesh grids by using two row vectors x and y.

val bernoulli : ('a, 'b) Owl_dense_ndarray_generic.kind -> ?p:float -> int -> int -> ('a, 'b) t

bernoulli k ~p:0.3 m n

val diagm : ?k:int -> ('a, 'b) t -> ('a, 'b) t

diagm k v creates a diagonal matrix using the elements in v as diagonal values. k specifies the main diagonal index. If k > 0 then it is above the main diagonal, if k < 0 then it is below the main diagonal. This function is the same as the diag function in Matlab.
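For example, a sketch with the float64 instance, placing a length-3 vector on the first superdiagonal:

let v = Owl.Dense.Matrix.D.sequential ~a:1. 1 3   (* row vector [1. 2. 3.] *)
let d = Owl.Dense.Matrix.D.diagm ~k:1 v           (* zero everywhere except 1., 2., 3. on the k=1 diagonal *)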

val triu : ?k:int -> ('a, 'b) t -> ('a, 'b) t

triu k x returns the element on and above the kth diagonal of x. k = 0 is the main diagonal, k > 0 is above the main diagonal, and k < 0 is below the main diagonal.

val tril : ?k:int -> ('a, 'b) t -> ('a, 'b) t

tril k x returns the element on and below the kth diagonal of x. k = 0 is the main diagonal, k > 0 is above the main diagonal, and k < 0 is below the main diagonal.
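A small sketch splitting a matrix into its upper part and its strictly lower part (float64 instance):

let x = Owl.Dense.Matrix.D.uniform 4 4
let u = Owl.Dense.Matrix.D.triu x           (* main diagonal and above *)
let l = Owl.Dense.Matrix.D.tril ~k:(-1) x   (* strictly below the main diagonal *)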

val symmetric : ?upper:bool -> ('a, 'b) t -> ('a, 'b) t

symmetric ~upper x creates a symmetric matrix using either upper or lower triangular part of x. If upper is true then it uses the upper part, if upper is false, then symmetric uses the lower part. By default upper is true.

val hermitian : ?upper:bool -> (Stdlib.Complex.t, 'a) t -> (Stdlib.Complex.t, 'a) t

hermitian ~upper x creates a hermitian matrix based on x. By default, the upper triangular part is used for creating the hermitian matrix, but you can use the lower part by setting upper=false.

val bidiagonal : ?upper:bool -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

bidiagonal upper dv ev creates a bidiagonal matrix using dv and ev. Both dv and ev are row vectors. dv is the main diagonal. If upper is true then ev is superdiagonal; if upper is false then ev is subdiagonal. By default, upper is true.

NOTE: because the diagonal elements in a hermitian matrix must be real, the function sets the imaginary part of the diagonal elements to zero by default. In other words, if the diagonal elements of x have non-zero imaginary parts, the imaginary parts will be dropped without a warning.

val toeplitz : ?c:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

toeplitz ~c r generates a toeplitz matrix using r and c. Both r and c are row vectors of the same length. If the first element of c is different from that of r, r's first element will be used.

Note: 1) If c is not passed in, then c = r will be used. 2) If c is not passed in and r is complex, then c = conj r will be used. 3) If r and c have different lengths, then the result is a rectangular matrix.
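For example, a sketch with the float64 instance, letting c default to r as described in note 1):

let r = Owl.Dense.Matrix.D.of_array [| 1.; 2.; 3. |] 1 3
let t = Owl.Dense.Matrix.D.toeplitz r   (* symmetric 3x3 Toeplitz matrix *)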

val hankel : ?r:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

hankel ~r c generates a hankel matrix using r and c. c will be the first column and r will be the last row of the returned matrix.

Note: 1) If only c is passed in, the elements below the anti-diagonal are zero. 2) If the last element of c is different from the first element of r, then the first element of c prevails. 3) c and r can have different lengths; the result will be a rectangular matrix.

val hadamard : ('a, 'b) Owl_dense_ndarray_generic.kind -> int -> ('a, 'b) t

hadamard k n constructs a hadamard matrix of order n. For a hadamard H, we have H'*H = n*I. Currently, this function handles only the cases where n, n/12, or n/20 is a power of 2.

val magic : ('a, 'b) Owl_dense_ndarray_generic.kind -> int -> ('a, 'b) t

magic k n constructs an n x n magic square matrix x. The elements in x are consecutive numbers increasing from 1 to n^2. n must be >= 3.

There are three different algorithms to deal with the cases where n is odd, singly even, or doubly even, respectively.
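As a quick sanity check with the float64 instance (the magic constant of a 4x4 square is 34):

let m = Owl.Dense.Matrix.D.magic 4
let s = Owl.Dense.Matrix.D.sum_rows m   (* every entry of s should be 34. *)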

Obtain basic properties
val shape : ('a, 'b) t -> int * int

If x is an m by n matrix, shape x returns (m,n), i.e., the size of two dimensions of x.

val row_num : ('a, 'b) t -> int

row_num x returns the number of rows in matrix x.

val col_num : ('a, 'b) t -> int

col_num x returns the number of columns in matrix x.

val numel : ('a, 'b) t -> int

numel x returns the number of elements in matrix x. It is equivalent to (row_num x) * (col_num x).

val nnz : ('a, 'b) t -> int

nnz x returns the number of non-zero elements in x.

val density : ('a, 'b) t -> float

density x returns the percentage of non-zero elements in x.

val size_in_bytes : ('a, 'b) t -> int

size_in_bytes x returns the size of x in bytes in memory.

val same_shape : ('a, 'b) t -> ('a, 'b) t -> bool

same_shape x y returns true if the two matrices have the same shape.

val same_data : ('a, 'b) t -> ('a, 'b) t -> bool

Refer to :doc:`owl_dense_ndarray_generic`.

val kind : ('a, 'b) t -> ('a, 'b) Owl_dense_ndarray_generic.kind

kind x returns the type of matrix x.

Manipulate a matrix
val get : ('a, 'b) t -> int -> int -> 'a

get x i j returns the value of element (i,j) of x. The shorthand for get x i j is x.{i,j}

val set : ('a, 'b) t -> int -> int -> 'a -> unit

set x i j a sets the element (i,j) of x to value a. The shorthand for set x i j a is x.{i,j} <- a
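A minimal usage sketch with the float64 instance:

let x = Owl.Dense.Matrix.D.zeros 2 2
let () = Owl.Dense.Matrix.D.set x 0 1 3.14
let v = Owl.Dense.Matrix.D.get x 0 1   (* v = 3.14 *)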

val get_index : ('a, 'b) t -> int array array -> 'a array

get_index x i returns an array of element values specified by the indices i. The length of array i equals the number of dimensions of x. The arrays in i must have the same length, and each represents the indices in that dimension.

E.g., [| [|1;2|]; [|3;4|] |] returns the value of elements at position (1,3) and (2,4) respectively.
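The same example written as OCaml code, using the float64 instance:

let x = Owl.Dense.Matrix.D.uniform 5 5
let vs = Owl.Dense.Matrix.D.get_index x [| [| 1; 2 |]; [| 3; 4 |] |]
(* vs contains the elements at positions (1,3) and (2,4) *)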

val set_index : ('a, 'b) t -> int array array -> 'a array -> unit

set_index sets the value of elements in x according to the indices specified by i. The length of array i equals the number of dimensions of x. The arrays in i must have the same length, and each represents the indices in that dimension.

val get_fancy : Owl_types.index list -> ('a, 'b) t -> ('a, 'b) t

get_fancy s x returns a copy of the slice in x. The slice is defined by s, which is a list of indices. Please refer to the same function in the Owl_dense_ndarray_generic documentation for more details.

val set_fancy : Owl_types.index list -> ('a, 'b) t -> ('a, 'b) t -> unit

set_fancy axis x y sets the slice defined by axis in x according to the values in y. y must have the same shape as the one defined by axis.

About the slice definition of axis, please refer to slice function.

val get_fancy_ext : Owl_types.index array -> ('a, 'b) t -> ('a, 'b) t

This function is used for the extended indexing operator available since OCaml 4.10.0. The indexing and slicing syntax becomes much lighter.

val set_fancy_ext : Owl_types.index array -> ('a, 'b) t -> ('a, 'b) t -> unit

This function is used for the extended indexing operator available since OCaml 4.10.0. The indexing and slicing syntax becomes much lighter.

val get_slice : int list list -> ('a, 'b) t -> ('a, 'b) t

get_slice axis x aims to provide a simpler version of get_fancy. This function assumes that every list element in the passed-in int list list represents a range, i.e., the R constructor.

E.g., [[];[0;3];[0]] is equivalent to [R []; R [0;3]; R [0]].

val set_slice : int list list -> ('a, 'b) t -> ('a, 'b) t -> unit

set_slice axis x y aims to provide a simpler version of set_fancy. This function assumes that every list element in the passed-in int list list represents a range, i.e., the R constructor.

E.g., [[];[0;3];[0]] is equivalent to [R []; R [0;3]; R [0]].
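
A minimal slicing sketch, assuming the float64 instantiation Owl.Mat of this generic module:

let () =
  let x = Owl.Mat.sequential 4 5 in
  (* rows 0..1, all columns *)
  let top2 = Owl.Mat.get_slice [ [0; 1]; [] ] x in
  (* overwrite column 0 of x with zeros; the slice shapes must match *)
  Owl.Mat.set_slice [ []; [0; 0] ] x (Owl.Mat.zeros 4 1);
  Owl.Mat.print top2;
  Owl.Mat.print x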

val get_slice_ext : int list array -> ('a, 'b) t -> ('a, 'b) t

Please refer to Ndarray document.

val set_slice_ext : int list array -> ('a, 'b) t -> ('a, 'b) t -> unit

Please refer to Ndarray document.

val row : ('a, 'b) t -> int -> ('a, 'b) t

row x i returns row i of x. Note: Unlike col, the return value is simply a view onto the original row in x, so modifying row's value also alters x.

The function supports negative indices.

val col : ('a, 'b) t -> int -> ('a, 'b) t

col x j returns column j of x. Note: Unlike row, the return value is a copy of the original column in x.

The function supports negative indices.

val rows : ('a, 'b) t -> int array -> ('a, 'b) t

rows x a returns the rows (defined in an int array a) of x. The returned rows will be combined into a new dense matrix. The order of rows in the new matrix is the same as that in the array a.

The function supports negative indices.

val cols : ('a, 'b) t -> int array -> ('a, 'b) t

Similar to rows, cols x a returns the columns (specified in array a) of x in a new dense matrix.

The function supports negative indices.
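
A short sketch of row, col and rows, assuming the float64 instantiation Owl.Mat of this generic module:

let () =
  let x = Owl.Mat.sequential 4 4 in
  let r = Owl.Mat.row x 1 in          (* a view: writing to r also changes x *)
  let c = Owl.Mat.col x (-1) in       (* a copy of the last column *)
  let s = Owl.Mat.rows x [| 0; 2 |] in
  Owl.Mat.print r; Owl.Mat.print c; Owl.Mat.print s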

val resize : ?head:bool -> ('a, 'b) t -> int array -> ('a, 'b) t

resize x s: please refer to the Ndarray document.

val reshape : ('a, 'b) t -> int array -> ('a, 'b) t

reshape x s returns a new m by n matrix from the m' by n' matrix x. Note that (m * n) must be equal to (m' * n'), and the returned matrix shares the same memory with the original x.

val flatten : ('a, 'b) t -> ('a, 'b) t

flatten x reshapes x into a 1 by n row vector without making a copy. Therefore the returned value shares the same memory space with the original x.

val reverse : ('a, 'b) t -> ('a, 'b) t

reverse x reverses the order of all elements in the flattened x and returns the result in a new matrix. The original x remains intact.

val flip : ?axis:int -> ('a, 'b) t -> ('a, 'b) t

flip ~axis x flips a matrix/ndarray along axis. By default axis = 0. The result is returned in a new matrix/ndarray, so the original x remains intact.

val rotate : ('a, 'b) t -> int -> ('a, 'b) t

rotate x d rotates x clockwise by d degrees. d must be a multiple of 90, otherwise the function will fail. If x is an n-dimensional array, then the function rotates the plane formed by the first and second dimensions.

val reset : ('a, 'b) t -> unit

reset x resets all the elements of x to zero value.

val fill : ('a, 'b) t -> 'a -> unit

fill x a fills x with value a.

val copy : ('a, 'b) t -> ('a, 'b) t

copy x returns a copy of matrix x.

val copy_row_to : ('a, 'b) t -> ('a, 'b) t -> int -> unit

copy_row_to v x i copies a 1 by n row vector v to the ith row in an m by n matrix x.

val copy_col_to : ('a, 'b) t -> ('a, 'b) t -> int -> unit

copy_col_to v x j copies an m by 1 column vector v to the jth column in an m by n matrix x.

val concat_vertical : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

concat_vertical x y concatenates two matrices x and y vertically, therefore their column numbers must be the same.

The associated operator is @=, please refer to :doc:`owl_operator`.

val concat_horizontal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

concat_horizontal x y concatenates two matrices x and y horizontally, therefore their row numbers must be the same.

The associated operator is @||, please refer to :doc:`owl_operator`.

val concat_vh : ('a, 'b) t array array -> ('a, 'b) t

concat_vh is used to assemble small parts of matrices into a bigger one. E.g. [| [|a; b; c|]; [|d; e; f|]; [|g; h; i|] |] will be concatenated into one big matrix, with each inner array forming one block row.

Please refer to :doc:`owl_dense_ndarray_generic`. for details.

val concatenate : ?axis:int -> ('a, 'b) t array -> ('a, 'b) t

concatenate ~axis:1 x concatenates an array of matrices along the second dimension. The matrices in x must have the same shape except for the dimension specified by axis. The default value of axis is 0, i.e., the lowest dimension of a matrix, i.e., rows.

val split : ?axis:int -> int array -> ('a, 'b) t -> ('a, 'b) t array

split ~axis parts x splits an ndarray x into parts along the specified axis. This function is the inverse operation of concatenate. The elements in parts must sum up to the dimension in the specified axis.
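
A round-trip sketch of concatenate and split, assuming the float64 instantiation Owl.Mat of this generic module:

let () =
  let a = Owl.Mat.zeros 2 3 and b = Owl.Mat.ones 3 3 in
  let x = Owl.Mat.concatenate ~axis:0 [| a; b |] in   (* 5 x 3 matrix *)
  let parts = Owl.Mat.split ~axis:0 [| 2; 3 |] x in   (* recovers a and b *)
  Array.iter Owl.Mat.print parts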

val split_vh : (int * int) array array -> ('a, 'b) t -> ('a, 'b) t array array

Please refer to :doc:`owl_dense_ndarray_generic`. for details.

val transpose : ('a, 'b) t -> ('a, 'b) t

transpose x transposes an m by n matrix to an n by m one.

val ctranspose : ('a, 'b) t -> ('a, 'b) t

ctranspose x performs conjugate transpose of a complex matrix x. If x is a real matrix, then ctranspose x is equivalent to transpose x.

val diag : ?k:int -> ('a, 'b) t -> ('a, 'b) t

diag k x returns the kth diagonal elements of x. k > 0 means above the main diagonal and k < 0 means below the main diagonal.

val swap_rows : ('a, 'b) t -> int -> int -> unit

swap_rows x i i' swaps the row i with row i' of x.

val swap_cols : ('a, 'b) t -> int -> int -> unit

swap_cols x j j' swaps the column j with column j' of x.

val tile : ('a, 'b) t -> int array -> ('a, 'b) t

tile x a provides the same behaviour as the numpy.tile function.

val repeat : ('a, 'b) t -> int array -> ('a, 'b) t

repeat x a repeats the elements of x according to the repetitions specified by a.

val pad : ?v:'a -> int list list -> ('a, 'b) t -> ('a, 'b) t

E.g., pad ~v:0. [[1;1]] x.

val dropout : ?rate:float -> ('a, 'b) t -> ('a, 'b) t

dropout ~rate:0.3 x drops out 30% of the elements in x, i.e., sets their values to zero.

val top : ('a, 'b) t -> int -> int array array

top x n returns the indices of n greatest values of x. The indices are arranged according to the corresponding element values, from the greatest one to the smallest one.

val bottom : ('a, 'b) t -> int -> int array array

bottom x n returns the indices of n smallest values of x. The indices are arranged according to the corresponding element values, from the smallest one to the greatest one.

val sort : ('a, 'b) t -> ('a, 'b) t

sort x performs quicksort of the elements in x. A new copy is returned as the result; the original x remains intact. If you want to perform in-place sorting, please use `sort_` instead.

val argsort : ('a, 'b) t -> (int64, Stdlib.Bigarray.int64_elt) t

argsort x returns the indices with which the elements in x are sorted in increasing order. Note that the returned index ndarray has the same shape as that of x, and the indices are 1D indices.
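
A small sketch of sort and top, assuming the float64 instantiation Owl.Mat of this generic module:

let () =
  let x = Owl.Mat.of_array [| 3.; 1.; 4.; 1.; 5.; 9. |] 2 3 in
  let sorted = Owl.Mat.sort x in            (* a sorted copy; x is unchanged *)
  let idx = Owl.Mat.top x 2 in              (* indices of the two largest values *)
  Owl.Mat.print sorted;
  Array.iter (fun i -> Printf.printf "(%i,%i) " i.(0) i.(1)) idx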

Iteration functions
val iteri : (int -> 'a -> unit) -> ('a, 'b) t -> unit

iteri f x iterates over all the elements in x and applies the user-defined function f. In f i v, i is the 1d index of the current element and v is its value.

val iter : ('a -> unit) -> ('a, 'b) t -> unit

iter f x is the same as iteri f x except that the index of the current element is not passed to f.

val mapi : (int -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t

mapi f x maps each element in x to a new value by applying f. The first parameter is the 1d index of the element and the second parameter is its value.

val map : ('a -> 'a) -> ('a, 'b) t -> ('a, 'b) t

map f x is similar to mapi f x except that the index of the current element is not passed to f.

val foldi : ?axis:int -> (int -> 'a -> 'a -> 'a) -> 'a -> ('a, 'b) t -> ('a, 'b) t

foldi ~axis f a x folds (or reduces) the elements in x from the left along the specified axis using the function f. a is the initial value; in f i acc b, acc is the accumulator and b is one of the elements in x along the specified axis. Note that i is the 1d index of b.

val fold : ?axis:int -> ('a -> 'a -> 'a) -> 'a -> ('a, 'b) t -> ('a, 'b) t

Similar to foldi, except that the index of an element is not passed to f.
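
A folding sketch, assuming the float64 instantiation Owl.Mat of this generic module:

let () =
  let x = Owl.Mat.sequential 3 4 in
  (* fold along axis 1 with (+.): row sums, returned as a 3 x 1 matrix *)
  let row_sums = Owl.Mat.fold ~axis:1 ( +. ) 0. x in
  Owl.Mat.print row_sums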

val scani : ?axis:int -> (int -> 'a -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t

scani ~axis f x scans x along the specified axis using the function f. f i acc a returns an updated acc which is passed to the next call of f. This function can be used to implement accumulative operations such as the sum and prod functions. Note that i is the 1d index of a in x.

val scan : ?axis:int -> ('a -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t

Similar to scani, except that the index of an element is not passed to f.

val filteri : (int -> 'a -> bool) -> ('a, 'b) t -> int array

filteri f x uses f to filter the elements in x. An element is included if f returns true. The result is an array of 1d indices of the selected elements.

val filter : ('a -> bool) -> ('a, 'b) t -> int array

Similar to filteri, but the indices of the elements are not passed to the function f.

val iteri_2d : (int -> int -> 'a -> unit) -> ('a, 'b) t -> unit

Similar to `iteri` but 2d indices (i,j) are passed to the user function.

val mapi_2d : (int -> int -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t

Similar to `mapi` but 2d indices (i,j) are passed to the user function.

val foldi_2d : ?axis:int -> (int -> int -> 'a -> 'a -> 'a) -> 'a -> ('a, 'b) t -> ('a, 'b) t

Similar to `foldi` but 2d indices (i,j) are passed to the user function.

val scani_2d : ?axis:int -> (int -> int -> 'a -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t

Similar to `scani` but 2d indices (i,j) are passed to the user function.

val filteri_2d : (int -> int -> 'a -> bool) -> ('a, 'b) t -> (int * int) array

Similar to `filteri` but 2d indices (i,j) are returned.

val iter2i_2d : (int -> int -> 'a -> 'c -> unit) -> ('a, 'b) t -> ('c, 'd) t -> unit

Similar to `iter2i` but 2d indices (i,j) are passed to the user function.

val map2i_2d : (int -> int -> 'a -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

Similar to `map2i` but 2d indices (i,j) are passed to the user function.

val iter2i : (int -> 'a -> 'b -> unit) -> ('a, 'c) t -> ('b, 'd) t -> unit

Similar to iteri but applies to two matrices x and y. Both x and y must have the same shape.

val iter2 : ('a -> 'b -> unit) -> ('a, 'c) t -> ('b, 'd) t -> unit

Similar to iter2i, except that the index is not passed to f.

val map2i : (int -> 'a -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

map2i f x y applies f to two elements of the same position in both x and y. Note that 1d index is passed to function f.

val map2 : ('a -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

map2 f x y is similar to map2i f x y except the index is not passed.
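
A sketch of iter2 and map2, assuming the float64 instantiation Owl.Mat of this generic module:

let () =
  let x = Owl.Mat.ones 2 2 and y = Owl.Mat.sequential 2 2 in
  (* combine the two matrices elementwise into a new matrix *)
  let z = Owl.Mat.map2 (fun a b -> a +. 2. *. b) x y in
  Owl.Mat.iter2 (fun a b -> Printf.printf "%g %g\n" a b) x y;
  Owl.Mat.print z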

val iteri_rows : (int -> ('a, 'b) t -> unit) -> ('a, 'b) t -> unit

iteri_rows f x iterates every row in x and applies function f : int -> mat -> unit to each of them.

val iter_rows : (('a, 'b) t -> unit) -> ('a, 'b) t -> unit

Similar to iteri_rows except row number is not passed to f.

val iter2i_rows : (int -> ('a, 'b) t -> ('a, 'b) t -> unit) -> ('a, 'b) t -> ('a, 'b) t -> unit

iter2i_rows f x y iterates over the rows of two matrices x and y.

val iter2_rows : (('a, 'b) t -> ('a, 'b) t -> unit) -> ('a, 'b) t -> ('a, 'b) t -> unit

Similar to iter2i_rows but the row indices are not passed to f.

val iteri_cols : (int -> ('a, 'b) t -> unit) -> ('a, 'b) t -> unit

iteri_cols f x iterates every column in x and applies function f : int -> mat -> unit to each of them. Column number is passed to f as the first parameter.

val iter_cols : (('a, 'b) t -> unit) -> ('a, 'b) t -> unit

Similar to iteri_cols except col number is not passed to f.

val filteri_rows : (int -> ('a, 'b) t -> bool) -> ('a, 'b) t -> int array

filteri_rows f x uses function f : int -> mat -> bool to check each row in x, then returns an int array containing the indices of those rows which satisfy the function f.

val filter_rows : (('a, 'b) t -> bool) -> ('a, 'b) t -> int array

Similar to filteri_rows except that the row indices are not passed to f.

val filteri_cols : (int -> ('a, 'b) t -> bool) -> ('a, 'b) t -> int array

filteri_cols f x uses function f : int -> mat -> bool to check each column in x, then returns an int array containing the indices of those columns which satisfy the function f.

val filter_cols : (('a, 'b) t -> bool) -> ('a, 'b) t -> int array

Similar to filteri_cols except that the column indices are not passed to f.

val fold_rows : ('c -> ('a, 'b) t -> 'c) -> 'c -> ('a, 'b) t -> 'c

fold_rows f a x folds all the rows in x using function f. The order of folding is from the first row to the last one.

val fold_cols : ('c -> ('a, 'b) t -> 'c) -> 'c -> ('a, 'b) t -> 'c

fold_cols f a x folds all the columns in x using function f. The order of folding is from the first column to the last one.

val mapi_rows : (int -> ('a, 'b) t -> 'c) -> ('a, 'b) t -> 'c array

mapi_rows f x maps every row in x to a value of type 'c by applying function f : int -> mat -> 'c to each of them. The result is an array of all the returned values.

val map_rows : (('a, 'b) t -> 'c) -> ('a, 'b) t -> 'c array

Similar to mapi_rows except row number is not passed to f.
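
A sketch of the row-wise mapping functions, assuming the float64 instantiation Owl.Mat of this generic module:

let () =
  let x = Owl.Mat.uniform 4 3 in
  (* reduce every row to its sum, producing a float array of length 4 *)
  let sums = Owl.Mat.map_rows Owl.Mat.sum' x in
  Array.iter (Printf.printf "%g ") sums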

val mapi_cols : (int -> ('a, 'b) t -> 'c) -> ('a, 'b) t -> 'c array

mapi_cols f x maps every column in x to a value of type 'c by applying function f : int -> mat -> 'c.

val map_cols : (('a, 'b) t -> 'c) -> ('a, 'b) t -> 'c array

Similar to mapi_cols except column number is not passed to f.

val mapi_by_row : int -> (int -> ('a, 'b) t -> ('a, 'b) t) -> ('a, 'b) t -> ('a, 'b) t

mapi_by_row d f x applies f to each row of an m by n matrix x, then uses the returned d dimensional row vectors to assemble a new m by d matrix.

val map_by_row : int -> (('a, 'b) t -> ('a, 'b) t) -> ('a, 'b) t -> ('a, 'b) t

map_by_row d f x is similar to mapi_by_row except that the row indices are not passed to f.

val mapi_by_col : int -> (int -> ('a, 'b) t -> ('a, 'b) t) -> ('a, 'b) t -> ('a, 'b) t

mapi_by_col d f x applies f to each column of an m by n matrix x, then uses the returned d dimensional column vectors to assemble a new d by n matrix.

val map_by_col : int -> (('a, 'b) t -> ('a, 'b) t) -> ('a, 'b) t -> ('a, 'b) t

map_by_col d f x is similar to mapi_by_col except that the column indices are not passed to f.

val mapi_at_row : (int -> 'a -> 'a) -> ('a, 'b) t -> int -> ('a, 'b) t

mapi_at_row f x i creates a new matrix by applying function f only to the ith row in matrix x.

val map_at_row : ('a -> 'a) -> ('a, 'b) t -> int -> ('a, 'b) t

map_at_row f x i is similar to mapi_at_row except that the index of an element is not passed to f.

val mapi_at_col : (int -> 'a -> 'a) -> ('a, 'b) t -> int -> ('a, 'b) t

mapi_at_col f x j creates a new matrix by applying function f only to the jth column in matrix x.

val map_at_col : ('a -> 'a) -> ('a, 'b) t -> int -> ('a, 'b) t

map_at_col f x j is similar to mapi_at_col except that the index of an element is not passed to f.

Examination & Comparison
val exists : ('a -> bool) -> ('a, 'b) t -> bool

exists f x checks all the elements in x using f. If at least one element satisfies f then the function returns true otherwise false.

val not_exists : ('a -> bool) -> ('a, 'b) t -> bool

not_exists f x checks all the elements in x, the function returns true only if all the elements fail to satisfy f : float -> bool.

val for_all : ('a -> bool) -> ('a, 'b) t -> bool

for_all f x checks all the elements in x, the function returns true if and only if all the elements pass the check of function f.

val is_zero : ('a, 'b) t -> bool

is_zero x returns true if all the elements in x are zeros.

val is_positive : ('a, 'b) t -> bool

is_positive x returns true if all the elements in x are positive.

val is_negative : ('a, 'b) t -> bool

is_negative x returns true if all the elements in x are negative.

val is_nonpositive : ('a, 'b) t -> bool

is_nonpositive x returns true if all the elements in x are non-positive.

val is_nonnegative : ('a, 'b) t -> bool

is_nonnegative x returns true if all the elements in x are non-negative.

val is_normal : ('a, 'b) t -> bool

is_normal x returns true if all the elements in x are normal float numbers, i.e., not NaN, not INF, not SUBNORMAL. Please refer to

https://www.gnu.org/software/libc/manual/html_node/Floating-Point-Classes.html https://www.gnu.org/software/libc/manual/html_node/Infinity-and-NaN.html#Infinity-and-NaN

val not_nan : ('a, 'b) t -> bool

not_nan x returns false if there is any NaN element in x. Otherwise, the function returns true indicating all the numbers in x are not NaN.

val not_inf : ('a, 'b) t -> bool

not_inf x returns false if there is any positive or negative INF element in x. Otherwise, the function returns true.

val equal : ('a, 'b) t -> ('a, 'b) t -> bool

equal x y returns true if two matrices x and y are equal.

val not_equal : ('a, 'b) t -> ('a, 'b) t -> bool

not_equal x y returns true if at least one element in x is not equal to the corresponding element in y.

val greater : ('a, 'b) t -> ('a, 'b) t -> bool

greater x y returns true if all the elements in x are greater than the corresponding elements in y.

val less : ('a, 'b) t -> ('a, 'b) t -> bool

less x y returns true if all the elements in x are smaller than the corresponding elements in y.

val greater_equal : ('a, 'b) t -> ('a, 'b) t -> bool

greater_equal x y returns true if all the elements in x are not smaller than the corresponding elements in y.

val less_equal : ('a, 'b) t -> ('a, 'b) t -> bool

less_equal x y returns true if all the elements in x are not greater than the corresponding elements in y.

val elt_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

elt_equal x y performs element-wise = comparison of x and y. Assume that a is an element of x and b is the corresponding element of y at the same position. The function returns another binary (0 and 1) ndarray/matrix wherein 1 indicates a = b.

val elt_not_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

elt_not_equal x y performs element-wise != comparison of x and y. Assume that a is an element of x and b is the corresponding element of y at the same position. The function returns another binary (0 and 1) ndarray/matrix wherein 1 indicates a <> b.

val elt_less : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

elt_less x y performs element-wise < comparison of x and y. Assume that a is an element of x and b is the corresponding element of y at the same position. The function returns another binary (0 and 1) ndarray/matrix wherein 1 indicates a < b.

val elt_greater : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

elt_greater x y performs element-wise > comparison of x and y. Assume that a is an element of x and b is the corresponding element of y at the same position. The function returns another binary (0 and 1) ndarray/matrix wherein 1 indicates a > b.

val elt_less_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

elt_less_equal x y performs element-wise <= comparison of x and y. Assume that a is an element of x and b is the corresponding element of y at the same position. The function returns another binary (0 and 1) ndarray/matrix wherein 1 indicates a <= b.

val elt_greater_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

elt_greater_equal x y performs element-wise >= comparison of x and y. Assume that a is an element of x and b is the corresponding element of y at the same position. The function returns another binary (0 and 1) ndarray/matrix wherein 1 indicates a >= b.

val equal_scalar : ('a, 'b) t -> 'a -> bool

equal_scalar x a checks if all the elements in x are equal to a. The function returns true iff for every element b in x, b = a.

val not_equal_scalar : ('a, 'b) t -> 'a -> bool

not_equal_scalar x a checks if all the elements in x are not equal to a. The function returns true iff for every element b in x, b <> a.

val less_scalar : ('a, 'b) t -> 'a -> bool

less_scalar x a checks if all the elements in x are less than a. The function returns true iff for every element b in x, b < a.

val greater_scalar : ('a, 'b) t -> 'a -> bool

greater_scalar x a checks if all the elements in x are greater than a. The function returns true iff for every element b in x, b > a.

val less_equal_scalar : ('a, 'b) t -> 'a -> bool

less_equal_scalar x a checks if all the elements in x are less or equal to a. The function returns true iff for every element b in x, b <= a.

val greater_equal_scalar : ('a, 'b) t -> 'a -> bool

greater_equal_scalar x a checks if all the elements in x are greater or equal to a. The function returns true iff for every element b in x, b >= a.

val elt_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

elt_equal_scalar x a performs element-wise = comparison of x and a. Assume that b is an element of x. The function returns another binary (0 and 1) ndarray/matrix wherein 1 at the corresponding position indicates b = a, otherwise 0.

val elt_not_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

elt_not_equal_scalar x a performs element-wise != comparison of x and a. Assume that b is an element of x. The function returns another binary (0 and 1) ndarray/matrix wherein 1 at the corresponding position indicates b <> a, otherwise 0.

val elt_less_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

elt_less_scalar x a performs element-wise < comparison of x and a. Assume that b is an element of x. The function returns another binary (0 and 1) ndarray/matrix wherein 1 at the corresponding position indicates b < a, otherwise 0.

val elt_greater_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

elt_greater_scalar x a performs element-wise > comparison of x and a. Assume that b is an element of x. The function returns another binary (0 and 1) ndarray/matrix wherein 1 at the corresponding position indicates b > a, otherwise 0.

val elt_less_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

elt_less_equal_scalar x a performs element-wise <= comparison of x and a. Assume that b is an element of x. The function returns another binary (0 and 1) ndarray/matrix wherein 1 at the corresponding position indicates b <= a, otherwise 0.

val elt_greater_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

elt_greater_equal_scalar x a performs element-wise >= comparison of x and a. Assume that b is an element of x. The function returns another binary (0 and 1) ndarray/matrix wherein 1 at the corresponding position indicates b >= a, otherwise 0.

val approx_equal : ?eps:float -> ('a, 'b) t -> ('a, 'b) t -> bool

approx_equal ~eps x y returns true if x and y are approximately equal, i.e., for any two elements a from x and b from y, we have abs (a - b) < eps.

Note: the threshold check is exclusive for passed in eps.

val approx_equal_scalar : ?eps:float -> ('a, 'b) t -> 'a -> bool

approx_equal_scalar ~eps x a returns true if all the elements in x are approximately equal to a, i.e., abs (x - a) < eps. For complex numbers, the eps applies to both the real and imaginary parts.

Note: the threshold check is exclusive for the passed in eps.

val approx_elt_equal : ?eps:float -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

approx_elt_equal ~eps x y compares the element-wise equality of x and y, then returns another binary (i.e., 0 and 1) ndarray/matrix wherein 1 indicates that two corresponding elements a from x and b from y are considered as approximately equal, namely abs (a - b) < eps.

val approx_elt_equal_scalar : ?eps:float -> ('a, 'b) t -> 'a -> ('a, 'b) t

approx_elt_equal_scalar ~eps x a compares all the elements of x to a scalar value a, then returns another binary (i.e., 0 and 1) ndarray/matrix wherein 1 indicates that the element b from x is considered as approximately equal to a, namely abs (a - b) < eps.

Randomisation functions
val draw_rows : ?replacement:bool -> ('a, 'b) t -> int -> ('a, 'b) t * int array

draw_rows x m draws m rows randomly from x. The row indices are also returned in an int array along with the selected rows. The parameter replacement indicates whether the drawing is by replacement or not.

val draw_cols : ?replacement:bool -> ('a, 'b) t -> int -> ('a, 'b) t * int array

draw_cols x m draws m cols randomly from x. The column indices are also returned in an int array along with the selected columns. The parameter replacement indicates whether the drawing is by replacement or not.

val draw_rows2 : ?replacement:bool -> ('a, 'b) t -> ('a, 'b) t -> int -> ('a, 'b) t * ('a, 'b) t * int array

draw_rows2 x y c is similar to draw_rows but applies to two matrices.

val draw_cols2 : ?replacement:bool -> ('a, 'b) t -> ('a, 'b) t -> int -> ('a, 'b) t * ('a, 'b) t * int array

draw_cols2 x y c is similar to draw_cols but applies to two matrices.

val shuffle_rows : ('a, 'b) t -> ('a, 'b) t

shuffle_rows x shuffles all the rows in matrix x.

val shuffle_cols : ('a, 'b) t -> ('a, 'b) t

shuffle_cols x shuffles all the columns in matrix x.

val shuffle : ('a, 'b) t -> ('a, 'b) t

shuffle x shuffles all the elements in x by first shuffling along the rows then shuffling along columns. It is equivalent to shuffle_cols (shuffle_rows x).
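
A sampling sketch, assuming the float64 instantiation Owl.Mat of this generic module:

let () =
  let x = Owl.Mat.sequential 6 3 in
  (* draw two distinct rows, and also shuffle all rows *)
  let sample, idx = Owl.Mat.draw_rows ~replacement:false x 2 in
  let shuffled = Owl.Mat.shuffle_rows x in
  Array.iter (Printf.printf "picked row %i; ") idx;
  Owl.Mat.print sample;
  Owl.Mat.print shuffled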

Input/Output functions
val to_array : ('a, 'b) t -> 'a array

to_array x flattens an m by n matrix x then returns x as a float array of length (numel x).

val of_array : ('a, 'b) Owl_dense_ndarray_generic.kind -> 'a array -> int -> int -> ('a, 'b) t

of_array x m n converts a float array x into an m by n matrix. Note the length of x must be equal to (m * n).

Similar to reshape function, you can pass in one negative index to let Owl automatically infer its dimension.

val to_arrays : ('a, 'b) t -> 'a array array

to_arrays x returns an array of float arrays, wherein each row in x becomes an array in the result.

val of_arrays : ('a, 'b) Owl_dense_ndarray_generic.kind -> 'a array array -> ('a, 'b) t

of_arrays x converts an array of m float arrays (each of length n) into an m by n matrix.

val to_rows : ('a, 'b) t -> ('a, 'b) t array
val of_rows : ('a, 'b) t array -> ('a, 'b) t
val to_cols : ('a, 'b) t -> ('a, 'b) t array
val of_cols : ('a, 'b) t array -> ('a, 'b) t
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:('a -> string) -> ('a, 'b) t -> unit

print x pretty prints matrix x without headings.

val save : out:string -> ('a, 'b) t -> unit

save x ~out saves the matrix x to a file with the name out. The format is binary: the Marshal module is used to serialise the matrix.

val load : ('a, 'b) Owl_dense_ndarray_generic.kind -> string -> ('a, 'b) t

load f loads a matrix from file f. The file must be previously saved by using save function.

val save_txt : ?sep:string -> ?append:bool -> out:string -> ('a, 'b) t -> unit

save_txt ~sep ~append ~out x saves the matrix x into a text file out delimited by the specified string sep (default: tab). If append is false (it is by default), an existing file will be truncated and overwritten. If append is true and the file exists, new rows will be appended to it. Files are created, if necessary, with the AND of 0o644 and the user's umask value. Note that the operation can be very time consuming.

val load_txt : ?sep:string -> ('a, 'b) Owl_dense_ndarray_generic.kind -> string -> ('a, 'b) t

load_txt ~sep k f loads a text file f into a matrix of type k. The delimiter is specified by sep, which can be a regular expression.
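
A save/load round-trip sketch, assuming the float64 instantiation Owl.Mat of this generic module (where the kind argument of load_txt is already applied):

let () =
  let x = Owl.Mat.uniform 3 3 in
  Owl.Mat.save_txt ~out:"x.txt" x;
  let y = Owl.Mat.load_txt "x.txt" in
  (* text serialisation may lose a little precision, so compare approximately *)
  assert (Owl.Mat.approx_equal x y)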

val save_npy : out:string -> ('a, 'b) t -> unit

save_npy ~out x saves the matrix x into a npy file out. This function is implemented using npy-ocaml https://github.com/LaurentMazare/npy-ocaml.

val load_npy : ('a, 'b) Owl_dense_ndarray_generic.kind -> string -> ('a, 'b) t

load_npy k f loads a npy file f into a matrix of type k. If the matrix in the file is not of type k, the function fails with [file]: incorrect format. This function is implemented using npy-ocaml https://github.com/LaurentMazare/npy-ocaml.

Unary math operators
val re_c2s : (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) t -> (float, Stdlib.Bigarray.float32_elt) t

re_c2s x returns all the real components of x in a new ndarray of same shape.

val re_z2d : (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) t -> (float, Stdlib.Bigarray.float64_elt) t

re_z2d x returns all the real components of x in a new ndarray of the same shape.

val im_c2s : (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) t -> (float, Stdlib.Bigarray.float32_elt) t

im_c2s x returns all the imaginary components of x in a new ndarray of same shape.

val im_z2d : (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) t -> (float, Stdlib.Bigarray.float64_elt) t

im_z2d x returns all the imaginary components of x in a new ndarray of the same shape.

val min : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

min x returns the minimum of all elements in x along specified axis. If no axis is specified, x will be flattened and the minimum of all the elements will be returned. For two complex numbers, the one with the smaller magnitude will be selected. If two magnitudes are the same, the one with the smaller phase will be selected.

val min' : ('a, 'b) t -> 'a

min' x is similar to min but returns the minimum of all elements in x in scalar value.

val max : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

max x returns the maximum of all elements in x along specified axis. If no axis is specified, x will be flattened and the maximum of all the elements will be returned. For two complex numbers, the one with the greater magnitude will be selected. If two magnitudes are the same, the one with the greater phase will be selected.

val max' : ('a, 'b) t -> 'a

max' x is similar to max but returns the maximum of all elements in x in scalar value.

val minmax : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t

minmax ~axis x returns (min_v, max_v), where min_v and max_v contain the minimum and maximum values along the specified axis respectively.

val minmax' : ('a, 'b) t -> 'a * 'a

minmax' x returns (min_v, max_v), min_v is the minimum value in x while max_v is the maximum.

val min_i : ('a, 'b) t -> 'a * int array

min_i x returns the minimum of all elements in x as well as its index.

val max_i : ('a, 'b) t -> 'a * int array

max_i x returns the maximum of all elements in x as well as its index.

val minmax_i : ('a, 'b) t -> ('a * int array) * ('a * int array)

minmax_i x returns ((min_v,min_i), (max_v,max_i)) where (min_v,min_i) is the minimum value in x along with its index, while (max_v,max_i) is the maximum value along with its index.

val trace : ('a, 'b) t -> 'a

trace x returns the sum of diagonal elements in x.

val sum : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

sum ~axis x sums the elements in x along the specified axis.

val sum' : ('a, 'b) t -> 'a

sum' x returns the summation of all the elements in x.

val prod : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

prod ~axis x multiplies the elements in x along the specified axis.

val prod' : ('a, 'b) t -> 'a

prod' x returns the product of all the elements in x.

val mean : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

mean ~axis x calculates the mean along specified axis.

val mean' : ('a, 'b) t -> 'a

mean' x calculates the mean of all the elements in x.

val var : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

var ~axis x calculates the variance along specified axis.

val var' : ('a, 'b) t -> 'a

var' x calculates the variance of all the elements in x.

val std : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

std ~axis x calculates the standard deviation along the specified axis.

val std' : ('a, 'b) t -> 'a

std' x calculates the standard deviation of all the elements in x.
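
A descriptive-statistics sketch, assuming the float64 instantiation Owl.Mat of this generic module:

let () =
  let x = Owl.Mat.uniform 100 5 in
  Printf.printf "mean = %g, var = %g, std = %g\n"
    (Owl.Mat.mean' x) (Owl.Mat.var' x) (Owl.Mat.std' x);
  (* column-wise means returned as a 1 x 5 matrix *)
  Owl.Mat.print (Owl.Mat.mean ~axis:0 x)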

val sem : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

sem ~axis x calculates the standard error of the mean along the specified axis.

val sem' : ('a, 'b) t -> 'a

sem' x calculates the standard error of the mean of all the elements in x.

val sum_rows : ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

sum_rows x returns the summation of all the row vectors in x.

val sum_cols : ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

sum_cols x returns the summation of all the column vectors in x.

val mean_rows : ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

mean_rows x returns the mean value of all row vectors in x. It is equivalent to div_scalar (sum_rows x) (float_of_int (row_num x)).

val mean_cols : ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

mean_cols x returns the mean value of all column vectors in x. It is equivalent to div_scalar (sum_cols x) (float_of_int (col_num x)).

val min_rows : (float, 'b) t -> (float * int * int) array

min_rows x returns the minimum value in each row along with their coordinates.

val min_cols : (float, 'b) t -> (float * int * int) array

min_cols x returns the minimum value in each column along with their coordinates.

val max_rows : (float, 'b) t -> (float * int * int) array

max_rows x returns the maximum value in each row along with their coordinates.

val max_cols : (float, 'b) t -> (float * int * int) array

max_cols x returns the maximum value in each column along with their coordinates.

val abs : ('a, 'b) t -> ('a, 'b) t

abs x returns the absolute value of all elements in x in a new matrix.

val abs_c2s : (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) t -> (float, Stdlib.Bigarray.float32_elt) t

abs_c2s x is similar to abs but takes complex32 as input.

val abs_z2d : (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) t -> (float, Stdlib.Bigarray.float64_elt) t

abs_z2d x is similar to abs but takes complex64 as input.

val abs2 : ('a, 'b) t -> ('a, 'b) t

abs2 x returns the square of absolute value of all elements in x in a new ndarray.

val abs2_c2s : (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) t -> (float, Stdlib.Bigarray.float32_elt) t

abs2_c2s x is similar to abs2 but takes complex32 as input.

val abs2_z2d : (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) t -> (float, Stdlib.Bigarray.float64_elt) t

abs2_z2d x is similar to abs2 but takes complex64 as input.

val conj : ('a, 'b) t -> ('a, 'b) t

conj x computes the conjugate of the elements in x and returns the result in a new matrix. If the passed in x is a real matrix, the function simply returns a copy of the original x.

val neg : ('a, 'b) t -> ('a, 'b) t

neg x negates the elements in x and returns the result in a new matrix.

val reci : ('a, 'b) t -> ('a, 'b) t

reci x computes the reciprocal of every element in x and returns the result in a new ndarray.

val reci_tol : ?tol:'a -> ('a, 'b) t -> ('a, 'b) t

reci_tol ~tol x computes the reciprocal of every element in x. Different from reci, reci_tol sets the elements whose absolute value is smaller than tol to zero. If tol is not specified, the default Owl_utils.eps Float32 will be used. For complex numbers, refer to Owl's doc to see how they are compared.

val signum : (float, 'a) t -> (float, 'a) t

signum computes the sign value (-1 for negative numbers, 0 (or -0) for zero, 1 for positive numbers, nan for nan).

val sqr : ('a, 'b) t -> ('a, 'b) t

sqr x computes the square of the elements in x and returns the result in a new matrix.

val sqrt : ('a, 'b) t -> ('a, 'b) t

sqrt x computes the square root of the elements in x and returns the result in a new matrix.

val cbrt : ('a, 'b) t -> ('a, 'b) t

cbrt x computes the cubic root of the elements in x and returns the result in a new matrix.

val exp : ('a, 'b) t -> ('a, 'b) t

exp x computes the exponential of the elements in x and returns the result in a new matrix.

val exp2 : ('a, 'b) t -> ('a, 'b) t

exp2 x computes the base-2 exponential of the elements in x and returns the result in a new matrix.

val exp10 : ('a, 'b) t -> ('a, 'b) t

exp10 x computes the base-10 exponential of the elements in x and returns the result in a new matrix.

val expm1 : ('a, 'b) t -> ('a, 'b) t

expm1 x computes exp x -. 1. of the elements in x and returns the result in a new matrix.

val log : ('a, 'b) t -> ('a, 'b) t

log x computes the logarithm of the elements in x and returns the result in a new matrix.

val log10 : ('a, 'b) t -> ('a, 'b) t

log10 x computes the base-10 logarithm of the elements in x and returns the result in a new matrix.

val log2 : ('a, 'b) t -> ('a, 'b) t

log2 x computes the base-2 logarithm of the elements in x and returns the result in a new matrix.

val log1p : ('a, 'b) t -> ('a, 'b) t

log1p x computes log (1 + x) of the elements in x and returns the result in a new matrix.

val sin : ('a, 'b) t -> ('a, 'b) t

sin x computes the sine of the elements in x and returns the result in a new matrix.

val cos : ('a, 'b) t -> ('a, 'b) t

cos x computes the cosine of the elements in x and returns the result in a new matrix.

val tan : ('a, 'b) t -> ('a, 'b) t

tan x computes the tangent of the elements in x and returns the result in a new matrix.

val asin : ('a, 'b) t -> ('a, 'b) t

asin x computes the arc sine of the elements in x and returns the result in a new matrix.

val acos : ('a, 'b) t -> ('a, 'b) t

acos x computes the arc cosine of the elements in x and returns the result in a new matrix.

val atan : ('a, 'b) t -> ('a, 'b) t

atan x computes the arc tangent of the elements in x and returns the result in a new matrix.

val sinh : ('a, 'b) t -> ('a, 'b) t

sinh x computes the hyperbolic sine of the elements in x and returns the result in a new matrix.

val cosh : ('a, 'b) t -> ('a, 'b) t

cosh x computes the hyperbolic cosine of the elements in x and returns the result in a new matrix.

val tanh : ('a, 'b) t -> ('a, 'b) t

tanh x computes the hyperbolic tangent of the elements in x and returns the result in a new matrix.

val asinh : ('a, 'b) t -> ('a, 'b) t

asinh x computes the hyperbolic arc sine of the elements in x and returns the result in a new matrix.

val acosh : ('a, 'b) t -> ('a, 'b) t

acosh x computes the hyperbolic arc cosine of the elements in x and returns the result in a new matrix.

val atanh : ('a, 'b) t -> ('a, 'b) t

atanh x computes the hyperbolic arc tangent of the elements in x and returns the result in a new matrix.

val floor : ('a, 'b) t -> ('a, 'b) t

floor x computes the floor of the elements in x and returns the result in a new matrix.

val ceil : ('a, 'b) t -> ('a, 'b) t

ceil x computes the ceiling of the elements in x and returns the result in a new matrix.

val round : ('a, 'b) t -> ('a, 'b) t

round x rounds the elements in x and returns the result in a new matrix.

val trunc : ('a, 'b) t -> ('a, 'b) t

trunc x computes the truncation of the elements in x and returns the result in a new matrix.

val fix : ('a, 'b) t -> ('a, 'b) t

fix x rounds each element of x to the nearest integer toward zero. For positive elements, the behavior is the same as floor. For negative ones, the behavior is the same as ceil.

val modf : ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t

modf x performs modf over all the elements in x; the fractional part is saved in the first element of the returned tuple whereas the integer part is saved in the second element.

val erf : (float, 'a) t -> (float, 'a) t

erf x computes the error function of the elements in x and returns the result in a new matrix.

val erfc : (float, 'a) t -> (float, 'a) t

erfc x computes the complementary error function of the elements in x and returns the result in a new matrix.

val logistic : (float, 'a) t -> (float, 'a) t

logistic x computes the logistic function 1 / (1 + exp(-x)) of the elements in x and returns the result in a new matrix.

val relu : (float, 'a) t -> (float, 'a) t

relu x computes the rectified linear unit function max(x, 0) of the elements in x and returns the result in a new matrix.

val elu : ?alpha:float -> (float, 'a) t -> (float, 'a) t

refer to Owl_dense_ndarray_generic.elu

val leaky_relu : ?alpha:float -> (float, 'a) t -> (float, 'a) t

refer to Owl_dense_ndarray_generic.leaky_relu

val softplus : (float, 'a) t -> (float, 'a) t

softplus x computes the softplus function log(1 + exp(x)) of the elements in x and returns the result in a new matrix.

val softsign : (float, 'a) t -> (float, 'a) t

softsign x computes the softsign function x / (1 + abs(x)) of the elements in x and returns the result in a new matrix.

val softmax : ?axis:int -> (float, 'a) t -> (float, 'a) t

softmax x computes the softmax function (exp x) / (sum (exp x)) of all the elements along the specified axis in x and returns the result in a new ndarray.

val sigmoid : (float, 'a) t -> (float, 'a) t

sigmoid x computes the sigmoid function 1 / (1 + exp (-x)) for each element in x.
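
A sketch of the activation functions, assuming the float64 instantiation Owl.Mat of this generic module:

let () =
  let x = Owl.Mat.uniform 2 5 in
  let p = Owl.Mat.softmax ~axis:1 x in      (* after softmax each row sums to 1 *)
  Owl.Mat.print (Owl.Mat.sum_rows p);
  Owl.Mat.print (Owl.Mat.sigmoid x)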

val log_sum_exp' : (float, 'a) t -> float

log_sum_exp' x computes the logarithm of the sum of exponentials of all the elements in x.

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> (float, 'a) t -> (float, 'a) t

log_sum_exp ~axis x computes the logarithm of the sum of exponentials of all the elements in x along the specified axis.

val l1norm : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

l1norm x calculates the l1-norm of x along the specified axis.

val l1norm' : ('a, 'b) t -> 'a

l1norm' x calculates the l1-norm of all the elements in x.

val l2norm : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

l2norm x calculates the l2-norm of x along the specified axis.

val l2norm' : ('a, 'b) t -> 'a

l2norm' x calculates the l2-norm of all the elements in x.

val l2norm_sqr : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

l2norm_sqr x calculates the squared l2-norm of x along the specified axis.

val l2norm_sqr' : ('a, 'b) t -> 'a

l2norm_sqr' x calculates the square of the l2-norm (or l2norm, Euclidean norm) of all elements in x. The function uses conjugate transpose in the product, hence it always returns a float number.

val vecnorm : ?axis:int -> ?p:float -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_ndarray_generic`.

val vecnorm' : ?p:float -> ('a, 'b) t -> 'a

Refer to :doc:`owl_dense_ndarray_generic`.

val max_pool : ?padding:Owl_types.padding -> (float, 'a) t -> int array -> int array -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`.

val avg_pool : ?padding:Owl_types.padding -> (float, 'a) t -> int array -> int array -> (float, 'a) t

Refer to :doc:`owl_dense_ndarray_generic`.

val cumsum : ?axis:int -> ('a, 'b) t -> ('a, 'b) t

cumsum ~axis x, refer to the documentation in Owl_dense_ndarray_generic.

val cumprod : ?axis:int -> ('a, 'b) t -> ('a, 'b) t

cumprod ~axis x, refer to the documentation in Owl_dense_ndarray_generic.

val cummin : ?axis:int -> ('a, 'b) t -> ('a, 'b) t

cummin ~axis x : performs cumulative min along axis dimension.

val cummax : ?axis:int -> ('a, 'b) t -> ('a, 'b) t

cummax ~axis x : performs cumulative max along axis dimension.

val diff : ?axis:int -> ?n:int -> ('a, 'b) t -> ('a, 'b) t

diff ~axis ~n x calculates the n-th difference of x along the specified axis.

Parameters: * axis: axis to calculate the difference. The default value is the highest dimension. * n: how many times to calculate the difference. The default value is 1.

Return: * The difference ndarray y. Note that the shape of y is 1 less than that of x along the specified axis.

val angle : (Stdlib.Complex.t, 'a) t -> (Stdlib.Complex.t, 'a) t

angle x calculates the phase angle of all complex numbers in x.

val proj : (Stdlib.Complex.t, 'a) t -> (Stdlib.Complex.t, 'a) t

proj x computes the projection on the Riemann sphere of all elements in x.

val mat2gray : ?amin:'a -> ?amax:'a -> ('a, 'b) t -> ('a, 'b) t

mat2gray ~amin ~amax x converts the matrix x to an intensity image. The elements in x are clipped by amin and amax, and they will be between 0. and 1. after conversion to represent the intensity.

val lgamma : ('a, 'b) t -> ('a, 'b) t

lgamma x computes the loggamma of the elements in x and returns the result in a new matrix.

val dawsn : ('a, 'b) t -> ('a, 'b) t

dawsn x computes the Dawson function of the elements in x and returns the result in a new matrix.

val i0 : ('a, 'b) t -> ('a, 'b) t

i0 x computes the modified Bessel function of order 0 of the elements in x and returns the result in a new ndarray.

val i0e : ('a, 'b) t -> ('a, 'b) t

i0e x computes the exponentially scaled modified Bessel function of order 0 of the elements in x and returns the result in a new ndarray.

val i1 : ('a, 'b) t -> ('a, 'b) t

i1 x computes the modified Bessel function of order 1 of the elements in x and returns the result in a new ndarray.

val i1e : ('a, 'b) t -> ('a, 'b) t

i1e x computes the exponentially scaled modified Bessel function of order 1 of the elements in x and returns the result in a new ndarray.

val iv : v:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

iv v x computes the modified Bessel function of x of real order v.

val scalar_iv : v:'a -> ('a, 'b) t -> ('a, 'b) t

scalar_iv v x computes the modified Bessel function of x of real order v.

val iv_scalar : v:('a, 'b) t -> 'a -> ('a, 'b) t

iv_scalar v x computes the modified Bessel function of x of real order v.

val j0 : ('a, 'b) t -> ('a, 'b) t

j0 x computes the Bessel function of order 0 of the elements in x and returns the result in a new ndarray.

val j1 : ('a, 'b) t -> ('a, 'b) t

j1 x computes the Bessel function of order 1 of the elements in x and returns the result in a new ndarray.

val jv : v:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

jv v x computes the Bessel function of the first kind of x of real order v.

val scalar_jv : v:'a -> ('a, 'b) t -> ('a, 'b) t

scalar_jv v x computes the Bessel function of the first kind of x of real order v.

val jv_scalar : v:('a, 'b) t -> 'a -> ('a, 'b) t

jv_scalar v x computes the Bessel function of the first kind of x of real order v.

Binary math operators
val add : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

add x y adds all the elements in x and y elementwise, and returns the result in a new matrix.

val sub : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

sub x y subtracts all the elements in x and y elementwise, and returns the result in a new matrix.

val mul : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

mul x y multiplies all the elements in x and y elementwise, and returns the result in a new matrix.

val div : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

div x y divides all the elements in x and y elementwise, and returns the result in a new matrix.

val add_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

add_scalar x a adds a scalar value a to each element in x, and returns the result in a new matrix.

val sub_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

sub_scalar x a subtracts a scalar value a from each element in x, and returns the result in a new matrix.

val mul_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

mul_scalar x a multiplies each element in x by a scalar value a, and returns the result in a new matrix.

val div_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

div_scalar x a divides each element in x by a scalar value a, and returns the result in a new matrix.

val scalar_add : 'a -> ('a, 'b) t -> ('a, 'b) t

scalar_add a x adds a scalar value a to each element in x, and returns the result in a new matrix.

val scalar_sub : 'a -> ('a, 'b) t -> ('a, 'b) t

scalar_sub a x subtracts each element in x from a scalar value a, and returns the result in a new matrix.

val scalar_mul : 'a -> ('a, 'b) t -> ('a, 'b) t

scalar_mul a x multiplies each element in x by a scalar value a, and returns the result in a new matrix.

val scalar_div : 'a -> ('a, 'b) t -> ('a, 'b) t

scalar_div a x divides a scalar value a by each element in x, and returns the result in a new matrix.

val dot : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

dot x y returns the matrix product of matrix x and y.
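
A matrix product sketch, assuming the float64 instantiation Owl.Mat of this generic module:

let () =
  let a = Owl.Mat.uniform 2 3 and b = Owl.Mat.uniform 3 4 in
  let c = Owl.Mat.dot a b in                (* a 2 x 4 result *)
  Printf.printf "result shape: %i x %i\n" (Owl.Mat.row_num c) (Owl.Mat.col_num c)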

val add_diag : ('a, 'b) t -> 'a -> ('a, 'b) t

add_diag x a adds a to the diagonal elements in x. A new copy of the data is returned.

val pow : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

pow x y computes pow(a, b) of all the elements in x and y elementwise, and returns the result in a new matrix.

val scalar_pow : 'a -> ('a, 'b) t -> ('a, 'b) t

scalar_pow a x raises the scalar a to the power of each element in x, and returns the result in a new matrix.

val pow_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

pow_scalar x a raises each element in x to the power a, and returns the result in a new matrix.

val atan2 : (float, 'a) t -> (float, 'a) t -> (float, 'a) t

atan2 x y computes atan2(a, b) of all the elements in x and y elementwise, and returns the result in a new matrix.

val scalar_atan2 : float -> (float, 'a) t -> (float, 'a) t

scalar_atan2 a x computes atan2(a, b) for every element b in x, and returns the result in a new matrix.

val atan2_scalar : (float, 'a) t -> float -> (float, 'a) t

atan2_scalar x a computes atan2(b, a) for every element b in x, and returns the result in a new matrix.

val hypot : (float, 'a) t -> (float, 'a) t -> (float, 'a) t

hypot x y computes sqrt(x*x + y*y) of all the elements in x and y elementwise, and returns the result in a new matrix.

val min2 : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

min2 x y computes the minimum of all the elements in x and y elementwise, and returns the result in a new matrix.

val max2 : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

max2 x y computes the maximum of all the elements in x and y elementwise, and returns the result in a new matrix.

val fmod : (float, 'a) t -> (float, 'a) t -> (float, 'a) t

fmod x y performs float modulus division.

val fmod_scalar : (float, 'a) t -> float -> (float, 'a) t

fmod_scalar x a performs mod division between x and scalar a.

val scalar_fmod : float -> (float, 'a) t -> (float, 'a) t

scalar_fmod x a performs mod division between scalar a and x.

val ssqr' : ('a, 'b) t -> 'a -> 'a

ssqr' x a computes the sum of squared differences of all the elements in x from constant a. This function only computes the square of each element rather than the conjugate transpose as sqr_nrm2 does.

val ssqr_diff' : ('a, 'b) t -> ('a, 'b) t -> 'a

ssqr_diff' x y computes the sum of squared differences of every element in x and its corresponding element in y.

val cross_entropy' : (float, 'a) t -> (float, 'a) t -> float

cross_entropy' x y calculates the cross entropy between x and y using base e.

val clip_by_value : ?amin:'a -> ?amax:'a -> ('a, 'b) t -> ('a, 'b) t

clip_by_value ~amin ~amax x clips the elements in x based on amin and amax. The elements smaller than amin will be set to amin, and the elements greater than amax will be set to amax.

val clip_by_l2norm : float -> (float, 'a) t -> (float, 'a) t

clip_by_l2norm t x clips x according to the threshold set by t.

val cov : ?b:('a, 'b) t -> a:('a, 'b) t -> ('a, 'b) t

cov ~a calculates the covariance matrix of a wherein each row represents one observation and each column represents one random variable. a is normalised by the number of observations-1. If there is only one observation, it is normalised by 1.

cov ~a ~b takes two matrices as inputs. The function flattens a and b first, then returns a 2 x 2 matrix; hence the two must have the same number of elements.

val kron : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

kron a b calculates the Kronecker product between the matrices a and b. If a is an m x n matrix and b is a p x q matrix, then kron(a,b) is an m*p x n*q matrix formed by taking all possible products between the elements of a and the matrix b.

val fma : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

fma x y z calculates the `fused multiply add`, i.e. (x * y) + z.

Cast functions
val cast : ('a, 'b) Owl_dense_ndarray_generic.kind -> ('c, 'd) t -> ('a, 'b) t

cast kind x casts x of type ('c, 'd) t to type ('a, 'b) t, as specified by the passed-in kind parameter. This function is a generalisation of the other type casting functions such as cast_s2d, cast_c2z, etc.

val cast_s2d : (float, Stdlib.Bigarray.float32_elt) t -> (float, Stdlib.Bigarray.float64_elt) t

cast_s2d x casts x from float32 to float64.

val cast_d2s : (float, Stdlib.Bigarray.float64_elt) t -> (float, Stdlib.Bigarray.float32_elt) t

cast_d2s x casts x from float64 to float32.

val cast_c2z : (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) t -> (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) t

cast_c2z x casts x from complex32 to complex64.

val cast_z2c : (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) t -> (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) t

cast_z2c x casts x from complex64 to complex32.

val cast_s2c : (float, Stdlib.Bigarray.float32_elt) t -> (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) t

cast_s2c x casts x from float32 to complex32.

val cast_d2z : (float, Stdlib.Bigarray.float64_elt) t -> (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) t

cast_d2z x casts x from float64 to complex64.

val cast_s2z : (float, Stdlib.Bigarray.float32_elt) t -> (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) t

cast_s2z x casts x from float32 to complex64.

val cast_d2c : (float, Stdlib.Bigarray.float64_elt) t -> (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) t

cast_d2c x casts x from float64 to complex32.

In-place modification
val create_ : out:('a, 'b) t -> 'a -> unit

TODO

val uniform_ : ?a:'a -> ?b:'a -> out:('a, 'b) t -> unit

TODO

val bernoulli_ : ?p:float -> out:('a, 'b) t -> unit

TODO

val zeros_ : out:('a, 'b) t -> unit

TODO

val ones_ : out:('a, 'b) t -> unit

TODO

val one_hot_ : out:('a, 'b) t -> int -> ('a, 'b) t -> unit

TODO

val sort_ : ('a, 'b) t -> unit

sort_ x performs in-place quicksort of the elements in x.

val copy_ : out:('a, 'b) t -> ('a, 'b) t -> unit

copy_ ~out src copies the data from ndarray src to destination out.

val reshape_ : out:('a, 'b) t -> ('a, 'b) t -> unit

TODO

val transpose_ : out:('a, 'b) t -> ?axis:int array -> ('a, 'b) t -> unit

transpose_ ~out x is similar to transpose x but the output is written to out.

val sum_ : out:('a, 'b) t -> axis:int -> ('a, 'b) t -> unit

TODO

val min_ : out:('a, 'b) t -> axis:int -> ('a, 'b) t -> unit

TODO

val max_ : out:('a, 'b) t -> axis:int -> ('a, 'b) t -> unit

TODO

val add_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

add_ x y is similar to add function but the output is written to out. You need to make sure out is big enough to hold the output result.
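
A sketch of the in-place variants, assuming the float64 instantiation Owl.Mat of this generic module:

let () =
  let x = Owl.Mat.ones 2 2 and y = Owl.Mat.ones 2 2 in
  let out = Owl.Mat.zeros 2 2 in
  Owl.Mat.add_ ~out x y;        (* out now holds x + y; x and y are untouched *)
  Owl.Mat.add_scalar_ out 1.;   (* with ?out omitted, the result overwrites the first argument *)
  Owl.Mat.print out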

val sub_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

sub_ x y is similar to sub function but the output is written to out. You need to make sure out is big enough to hold the output result.

val mul_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

mul_ x y is similar to mul function but the output is written to out. You need to make sure out is big enough to hold the output result.

val div_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

div_ x y is similar to div function but the output is written to out. You need to make sure out is big enough to hold the output result.

val pow_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

pow_ x y is similar to pow function but the output is written to out. You need to make sure out is big enough to hold the output result.

val atan2_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

atan2_ x y is similar to atan2 function but the output is written to out. You need to make sure out is big enough to hold the output result.

val hypot_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

hypot_ x y is similar to hypot function but the output is written to out. You need to make sure out is big enough to hold the output result.

val fmod_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

fmod_ x y is similar to fmod function but the output is written to out. You need to make sure out is big enough to hold the output result.

val min2_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

min2_ x y is similar to min2 function but the output is written to out. You need to make sure out is big enough to hold the output result.

val max2_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

max2_ x y is similar to max2 function but the output is written to out. You need to make sure out is big enough to hold the output result.

val add_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

add_scalar_ x y is similar to add_scalar function but the output is written to x.

val sub_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

sub_scalar_ x y is similar to sub_scalar function but the output is written to x.

val mul_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

mul_scalar_ x y is similar to mul_scalar function but the output is written to x.

val div_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

div_scalar_ x y is similar to div_scalar function but the output is written to x.

val pow_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

pow_scalar_ x y is similar to pow_scalar function but the output is written to x.

val atan2_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

atan2_scalar_ x y is similar to atan2_scalar function but the output is written to x.

val fmod_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

fmod_scalar_ x y is similar to fmod_scalar function but the output is written to x.

val scalar_add_ : ?out:('a, 'b) t -> 'a -> ('a, 'b) t -> unit

scalar_add_ a x is similar to scalar_add function but the output is written to x.

val scalar_sub_ : ?out:('a, 'b) t -> 'a -> ('a, 'b) t -> unit

scalar_sub_ a x is similar to scalar_sub function but the output is written to x.

val scalar_mul_ : ?out:('a, 'b) t -> 'a -> ('a, 'b) t -> unit

scalar_mul_ a x is similar to scalar_mul function but the output is written to x.

val scalar_div_ : ?out:('a, 'b) t -> 'a -> ('a, 'b) t -> unit

scalar_div_ a x is similar to scalar_div function but the output is written to x.

val scalar_pow_ : ?out:('a, 'b) t -> 'a -> ('a, 'b) t -> unit

scalar_pow_ a x is similar to scalar_pow function but the output is written to x.

val scalar_atan2_ : ?out:('a, 'b) t -> 'a -> ('a, 'b) t -> unit

scalar_atan2_ a x is similar to scalar_atan2 function but the output is written to x.

val scalar_fmod_ : ?out:('a, 'b) t -> 'a -> ('a, 'b) t -> unit

scalar_fmod_ a x is similar to scalar_fmod function but the output is written to x.

val fma_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

fma_ ~out x y z is similar to fma x y z function but the output is written to out.
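For instance, an element-wise fused multiply-add written to a preallocated buffer (Owl.Dense.Matrix.D assumed as before):

    module M = Owl.Dense.Matrix.D
    let x = M.create 2 2 2.
    let y = M.create 2 2 3.
    let z = M.create 2 2 1.
    let out = M.zeros 2 2
    let () = M.fma_ ~out x y z   (* out <- x * y + z element-wise, i.e. every entry is 7. here *)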

val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:'a -> ?beta:'a -> c:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

Refer to :doc:`owl_dense_matrix_generic`
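A hedged sketch of dot_ as a BLAS-style in-place product; the update rule c <- alpha * op(a) *@ op(b) + beta * c and the defaults (alpha = 1, beta = 0) are assumptions inferred from the parameter names:

    module M = Owl.Dense.Matrix.D
    let a = M.uniform 2 3
    let b = M.uniform 3 4
    let c = M.zeros 2 4
    let () = M.dot_ ~c a b   (* with the assumed defaults, c now holds a *@ b *)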

val conj_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

conj_ x is similar to conj but output is written to x

val abs_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

abs_ x is similar to abs but output is written to x

val neg_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

neg_ x is similar to neg but output is written to x

val reci_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

reci_ x is similar to reci but output is written to x

val signum_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

signum_ x is similar to signum but output is written to x

val sqr_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

sqr_ x is similar to sqr but output is written to x

val sqrt_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

sqrt_ x is similar to sqrt but output is written to x

val cbrt_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

cbrt_ x is similar to cbrt but output is written to x

val exp_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

exp_ x is similar to exp but output is written to x

val exp2_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

exp2_ x is similar to exp2 but output is written to x

val exp10_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

exp10_ x is similar to exp10 but output is written to x

val expm1_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

expm1_ x is similar to expm1 but output is written to x

val log_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

log_ x is similar to log but output is written to x

val log2_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

log2_ x is similar to log2 but output is written to x

val log10_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

log10_ x is similar to log10 but output is written to x

val log1p_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

log1p_ x is similar to log1p but output is written to x

val sin_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

sin_ x is similar to sin but output is written to x

val cos_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

cos_ x is similar to cos but output is written to x

val tan_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

tan_ x is similar to tan but output is written to x

val asin_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

asin_ x is similar to asin but output is written to x

val acos_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

acos_ x is similar to acos but output is written to x

val atan_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

atan_ x is similar to atan but output is written to x

val sinh_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

sinh_ x is similar to sinh but output is written to x

val cosh_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

cosh_ x is similar to cosh but output is written to x

val tanh_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

tanh_ x is similar to tanh but output is written to x

val asinh_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

asinh_ x is similar to asinh but output is written to x

val acosh_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

acosh_ x is similar to acosh but output is written to x

val atanh_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

atanh_ x is similar to atanh but output is written to x

val floor_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

floor_ x is similar to floor but output is written to x

val ceil_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

ceil_ x is similar to ceil but output is written to x

val round_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

round_ x is similar to round but output is written to x

val trunc_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

trunc_ x is similar to trunc but output is written to x

val fix_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

fix_ x is similar to fix but output is written to x

val erf_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

erf_ x is similar to erf but output is written to x

val erfc_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

erfc_ x is similar to erfc but output is written to x

val relu_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

relu_ x is similar to relu but output is written to x

val softplus_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

softplus_ x is similar to softplus but output is written to x

val softsign_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

softsign_ x is similar to softsign but output is written to x

val sigmoid_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

sigmoid_ x is similar to sigmoid but output is written to x

val softmax_ : ?out:('a, 'b) t -> ?axis:int -> ('a, 'b) t -> unit

softmax_ x is similar to softmax but output is written to x
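For example, a row-wise softmax written into a preallocated matrix (Owl.Dense.Matrix.D assumed):

    module M = Owl.Dense.Matrix.D
    let x = M.uniform 2 5
    let y = M.zeros 2 5
    let () = M.softmax_ ~out:y ~axis:1 x   (* each row of y now sums to 1 *)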

val cumsum_ : ?out:('a, 'b) t -> ?axis:int -> ('a, 'b) t -> unit

cumsum_ x is similar to cumsum but output is written to x

val cumprod_ : ?out:('a, 'b) t -> ?axis:int -> ('a, 'b) t -> unit

cumprod_ x is similar to cumprod but output is written to x

val cummin_ : ?out:('a, 'b) t -> ?axis:int -> ('a, 'b) t -> unit

cummin_ x is similar to cummin but output is written to x

val cummax_ : ?out:('a, 'b) t -> ?axis:int -> ('a, 'b) t -> unit

cummax_ x is similar to cummax but output is written to x

val dropout_ : ?out:('a, 'b) t -> ?rate:float -> ('a, 'b) t -> unit

dropout_ x is similar to dropout but output is written to x

val elt_equal_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

elt_equal_ x y is similar to elt_equal function but the output is written to out. You need to make sure out is big enough to hold the output result.
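A small sketch of the element-wise comparison writing a 0/1 mask into out (Owl.Dense.Matrix.D assumed):

    module M = Owl.Dense.Matrix.D
    let x = M.sequential 2 2   (* 0. 1. / 2. 3. *)
    let y = M.ones 2 2
    let out = M.zeros 2 2
    let () = M.elt_equal_ ~out x y   (* out is 1. where x equals y, 0. elsewhere *)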

val elt_not_equal_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

elt_not_equal_ x y is similar to elt_not_equal function but the output is written to out. You need to make sure out is big enough to hold the output result.

val elt_less_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

elt_less_ x y is similar to elt_less function but the output is written to out. You need to make sure out is big enough to hold the output result.

val elt_greater_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

elt_greater_ x y is similar to elt_greater function but the output is written to out. You need to make sure out is big enough to hold the output result.

val elt_less_equal_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

elt_less_equal_ x y is similar to elt_less_equal function but the output is written to out. You need to make sure out is big enough to hold the output result.

val elt_greater_equal_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

elt_greater_equal_ x y is similar to elt_greater_equal function but the output is written to out. You need to make sure out is big enough to hold the output result.

val elt_equal_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

elt_equal_scalar_ x a is similar to elt_equal_scalar function but the output is written to x.

val elt_not_equal_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

elt_not_equal_scalar_ x a is similar to elt_not_equal_scalar function but the output is written to x.

val elt_less_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

elt_less_scalar_ x a is similar to elt_less_scalar function but the output is written to x.

val elt_greater_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

elt_greater_scalar_ x a is similar to elt_greater_scalar function but the output is written to x.

val elt_less_equal_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

elt_less_equal_scalar_ x a is similar to elt_less_equal_scalar function but the output is written to x.

val elt_greater_equal_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

elt_greater_equal_scalar_ x a is similar to elt_greater_equal_scalar function but the output is written to x.

include module type of struct include Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (-$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (*$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (/$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val ($+) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($-) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($*) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($/) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (!=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<>) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (>) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (>=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
include sig ... end
val (=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (!=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<>$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (>$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (>=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (!=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<>.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (>.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (>=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (!=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<>.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (>.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (>=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (=~) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (=~$) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (=~.) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=~.$) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (%) : (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val (%$) : (float, 'a) Owl_dense_matrix_generic.t -> float -> (float, 'a) Owl_dense_matrix_generic.t
val (**) : (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val ($**) : float -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val (**$) : (float, 'a) Owl_dense_matrix_generic.t -> float -> (float, 'a) Owl_dense_matrix_generic.t
val (+=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (-=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (*=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (/=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (+$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (-$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (*$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (/$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (@=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (@||) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (.!{;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_matrix_generic.t
val (.!{;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (.${}) : ('a, 'b) Owl_dense_matrix_generic.t -> int list -> ('a, 'b) Owl_dense_matrix_generic.t
val (.${;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> int list array -> ('a, 'b) Owl_dense_matrix_generic.t
val (.${}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int list -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (.${;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int list array -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
include sig ... end
val (*@) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (.%{}) : ('a, 'b) Owl_dense_matrix_generic.t -> (int * int) -> 'a
val (.%{;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> (int * int) -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int array -> 'a -> unit
include sig ... end
val (**@) : ('a, 'b) Owl_linalg_generic.t -> float -> ('a, 'b) Owl_linalg_generic.t
val (/@) : ('a, 'b) Owl_linalg_generic.t -> ('a, 'b) Owl_linalg_generic.t -> ('a, 'b) Owl_linalg_generic.t
val inv : ('a, 'b) Owl_linalg_generic.t -> ('a, 'b) Owl_linalg_generic.t
val mpow : ('a, 'b) Owl_linalg_generic.t -> float -> ('a, 'b) Owl_linalg_generic.t
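A brief sketch of the infix operators pulled in by this include; Owl.Mat is the conventional double-precision alias and is assumed here for illustration:

    open Owl
    let a = Mat.uniform 3 3
    let b = Mat.uniform 3 3
    let c = Mat.((a +$ 1.) *@ b)   (* add a scalar to a, then take the matrix product with b *)
    let v = Mat.(a.%{1, 2})        (* read the element at row 1, column 2 *)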
\ No newline at end of file
diff --git a/owl/Owl_dense_matrix/Operator/index.html b/owl/Owl_dense_matrix/Operator/index.html
deleted file mode 100644
index b4256131c..000000000
--- a/owl/Owl_dense_matrix/Operator/index.html
+++ /dev/null
@@ -1,180 +0,0 @@
Operator (owl.Owl_dense_matrix.Operator)

Module Owl_dense_matrix.Operator

include sig ... end
val (+$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (-$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (*$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (/$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val ($+) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($-) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($*) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($/) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (!=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<>) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (>) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (>=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
include sig ... end
val (=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (!=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<>$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (>$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (>=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (!=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<>.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (>.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (>=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (!=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<>.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (>.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (>=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (=~) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (=~$) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (=~.) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=~.$) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (%) : (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val (%$) : (float, 'a) Owl_dense_matrix_generic.t -> float -> (float, 'a) Owl_dense_matrix_generic.t
val (**) : (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val ($**) : float -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val (**$) : (float, 'a) Owl_dense_matrix_generic.t -> float -> (float, 'a) Owl_dense_matrix_generic.t
val (+=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (-=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (*=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (/=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (+$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (-$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (*$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (/$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (@=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (@||) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (.!{;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_matrix_generic.t
val (.!{;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (.${}) : ('a, 'b) Owl_dense_matrix_generic.t -> int list -> ('a, 'b) Owl_dense_matrix_generic.t
val (.${;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> int list array -> ('a, 'b) Owl_dense_matrix_generic.t
val (.${}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int list -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (.${;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int list array -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
include sig ... end
val (*@) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (.%{}) : ('a, 'b) Owl_dense_matrix_generic.t -> (int * int) -> 'a
val (.%{;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> (int * int) -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int array -> 'a -> unit
include sig ... end
val (**@) : ('a, 'b) Owl_linalg_generic.t -> float -> ('a, 'b) Owl_linalg_generic.t
val (/@) : ('a, 'b) Owl_linalg_generic.t -> ('a, 'b) Owl_linalg_generic.t -> ('a, 'b) Owl_linalg_generic.t
\ No newline at end of file
diff --git a/owl/Owl_dense_matrix/S/index.html b/owl/Owl_dense_matrix/S/index.html
deleted file mode 100644
index 71991630c..000000000
--- a/owl/Owl_dense_matrix/S/index.html
+++ /dev/null
@@ -1,225 +0,0 @@
S (owl.Owl_dense_matrix.S)

Module Owl_dense_matrix.S

include module type of struct include Owl_dense_matrix_s end
type elt = float
type mat = (float, Stdlib.Bigarray.float32_elt) Owl_dense_matrix_generic.t
include Owl_dense_matrix_intf.Common with type elt := elt and type mat := mat
Create dense matrices
val empty : int -> int -> mat
val create : int -> int -> elt -> mat
val init : int -> int -> (int -> elt) -> mat
val init_2d : int -> int -> (int -> int -> elt) -> mat
val zeros : int -> int -> mat
val ones : int -> int -> mat
val eye : int -> mat
val sequential : ?a:elt -> ?step:elt -> int -> int -> mat
val uniform : ?a:elt -> ?b:elt -> int -> int -> mat
val gaussian : ?mu:elt -> ?sigma:elt -> int -> int -> mat
val bernoulli : ?p:float -> int -> int -> mat
val unit_basis : int -> int -> mat
val diagm : ?k:int -> mat -> mat
val triu : ?k:int -> mat -> mat
val tril : ?k:int -> mat -> mat
val symmetric : ?upper:bool -> mat -> mat
val bidiagonal : ?upper:bool -> mat -> mat -> mat
val toeplitz : ?c:mat -> mat -> mat
val hankel : ?r:mat -> mat -> mat
val hadamard : int -> mat
val magic : int -> mat
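A small usage sketch of these constructors with the single-precision module; Owl.Dense.Matrix.S is assumed to be the standard path for Owl_dense_matrix_s:

    module M = Owl.Dense.Matrix.S
    let a = M.eye 3                          (* 3 x 3 identity *)
    let b = M.uniform ~a:0. ~b:1. 2 4        (* 2 x 4, uniform in [0, 1) *)
    let c = M.sequential ~a:0. ~step:1. 3 3  (* 0., 1., 2., ... in row-major order *)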
Dense row vectors and meshgrids
val vector : int -> mat
val vector_zeros : int -> mat
val vector_ones : int -> mat
val vector_uniform : int -> mat
val linspace : elt -> elt -> int -> mat
val logspace : ?base:float -> elt -> elt -> int -> mat
val meshgrid : elt -> elt -> elt -> elt -> int -> int -> mat * mat
val meshup : mat -> mat -> mat * mat
Obtain the basic properties of a matrix
val shape : mat -> int * int
val row_num : mat -> int
val col_num : mat -> int
val numel : mat -> int
val nnz : mat -> int
val density : mat -> float
val size_in_bytes : mat -> int
val same_shape : mat -> mat -> bool
val same_data : mat -> mat -> bool
Manipulate a matrix
val get : mat -> int -> int -> elt
val set : mat -> int -> int -> elt -> unit
val get_index : mat -> int array array -> elt array
val set_index : mat -> int array array -> elt array -> unit
val get_fancy : Owl_types.index list -> mat -> mat
val set_fancy : Owl_types.index list -> mat -> mat -> unit
val get_slice : int list list -> mat -> mat
val set_slice : int list list -> mat -> mat -> unit
val row : mat -> int -> mat
val col : mat -> int -> mat
val rows : mat -> int array -> mat
val cols : mat -> int array -> mat
val resize : ?head:bool -> mat -> int array -> mat
val reshape : mat -> int array -> mat
val flatten : mat -> mat
val reverse : mat -> mat
val flip : ?axis:int -> mat -> mat
val rotate : mat -> int -> mat
val reset : mat -> unit
val fill : mat -> elt -> unit
val copy : mat -> mat
val copy_row_to : mat -> mat -> int -> unit
val copy_col_to : mat -> mat -> int -> unit
val concat_vertical : mat -> mat -> mat
val concat_horizontal : mat -> mat -> mat
val concat_vh : mat array array -> mat
val concatenate : ?axis:int -> mat array -> mat
val split : ?axis:int -> int array -> mat -> mat array
val split_vh : (int * int) array array -> mat -> mat array array
val transpose : mat -> mat
val ctranspose : mat -> mat
val swap_rows : mat -> int -> int -> unit
val swap_cols : mat -> int -> int -> unit
val tile : mat -> int array -> mat
val repeat : mat -> int array -> mat
val pad : ?v:elt -> int list list -> mat -> mat
val dropout : ?rate:float -> mat -> mat
val top : mat -> int -> int array array
val bottom : mat -> int -> int array array
val sort : mat -> mat
val argsort : mat -> (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
Iterate elements, columns, and rows.
val iteri : (int -> elt -> unit) -> mat -> unit
val iter : (elt -> unit) -> mat -> unit
val mapi : (int -> elt -> elt) -> mat -> mat
val map : (elt -> elt) -> mat -> mat
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> mat -> mat
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> mat -> mat
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> mat -> mat
val scan : ?axis:int -> (elt -> elt -> elt) -> mat -> mat
val filteri : (int -> elt -> bool) -> mat -> int array
val filter : (elt -> bool) -> mat -> int array
val iteri_2d : (int -> int -> elt -> unit) -> mat -> unit
val mapi_2d : (int -> int -> elt -> elt) -> mat -> mat
val foldi_2d : ?axis:int -> (int -> int -> elt -> elt -> elt) -> elt -> mat -> mat
val scani_2d : ?axis:int -> (int -> int -> elt -> elt -> elt) -> mat -> mat
val filteri_2d : (int -> int -> elt -> bool) -> mat -> (int * int) array
val iter2i_2d : (int -> int -> elt -> elt -> unit) -> mat -> mat -> unit
val map2i_2d : (int -> int -> elt -> elt -> elt) -> mat -> mat -> mat
val iter2i : (int -> elt -> elt -> unit) -> mat -> mat -> unit
val iter2 : (elt -> elt -> unit) -> mat -> mat -> unit
val map2i : (int -> elt -> elt -> elt) -> mat -> mat -> mat
val map2 : (elt -> elt -> elt) -> mat -> mat -> mat
val iteri_rows : (int -> mat -> unit) -> mat -> unit
val iter_rows : (mat -> unit) -> mat -> unit
val iter2i_rows : (int -> mat -> mat -> unit) -> mat -> mat -> unit
val iter2_rows : (mat -> mat -> unit) -> mat -> mat -> unit
val iteri_cols : (int -> mat -> unit) -> mat -> unit
val iter_cols : (mat -> unit) -> mat -> unit
val filteri_rows : (int -> mat -> bool) -> mat -> int array
val filter_rows : (mat -> bool) -> mat -> int array
val filteri_cols : (int -> mat -> bool) -> mat -> int array
val filter_cols : (mat -> bool) -> mat -> int array
val fold_rows : ('a -> mat -> 'a) -> 'a -> mat -> 'a
val fold_cols : ('a -> mat -> 'a) -> 'a -> mat -> 'a
val mapi_rows : (int -> mat -> 'a) -> mat -> 'a array
val map_rows : (mat -> 'a) -> mat -> 'a array
val mapi_cols : (int -> mat -> 'a) -> mat -> 'a array
val map_cols : (mat -> 'a) -> mat -> 'a array
val mapi_by_row : int -> (int -> mat -> mat) -> mat -> mat
val map_by_row : int -> (mat -> mat) -> mat -> mat
val mapi_by_col : int -> (int -> mat -> mat) -> mat -> mat
val map_by_col : int -> (mat -> mat) -> mat -> mat
val mapi_at_row : (int -> elt -> elt) -> mat -> int -> mat
val map_at_row : (elt -> elt) -> mat -> int -> mat
val mapi_at_col : (int -> elt -> elt) -> mat -> int -> mat
val map_at_col : (elt -> elt) -> mat -> int -> mat
Examine elements and compare two matrices
val exists : (elt -> bool) -> mat -> bool
val not_exists : (elt -> bool) -> mat -> bool
val for_all : (elt -> bool) -> mat -> bool
val is_zero : mat -> bool
val is_positive : mat -> bool
val is_negative : mat -> bool
val is_nonpositive : mat -> bool
val is_nonnegative : mat -> bool
val is_normal : mat -> bool
val not_nan : mat -> bool
val not_inf : mat -> bool
val equal : mat -> mat -> bool
val not_equal : mat -> mat -> bool
val greater : mat -> mat -> bool
val less : mat -> mat -> bool
val greater_equal : mat -> mat -> bool
val less_equal : mat -> mat -> bool
val elt_equal : mat -> mat -> mat
val elt_not_equal : mat -> mat -> mat
val elt_less : mat -> mat -> mat
val elt_greater : mat -> mat -> mat
val elt_less_equal : mat -> mat -> mat
val elt_greater_equal : mat -> mat -> mat
val equal_scalar : mat -> elt -> bool
val not_equal_scalar : mat -> elt -> bool
val less_scalar : mat -> elt -> bool
val greater_scalar : mat -> elt -> bool
val less_equal_scalar : mat -> elt -> bool
val greater_equal_scalar : mat -> elt -> bool
val elt_equal_scalar : mat -> elt -> mat
val elt_not_equal_scalar : mat -> elt -> mat
val elt_less_scalar : mat -> elt -> mat
val elt_greater_scalar : mat -> elt -> mat
val elt_less_equal_scalar : mat -> elt -> mat
val elt_greater_equal_scalar : mat -> elt -> mat
val approx_equal : ?eps:float -> mat -> mat -> bool
val approx_equal_scalar : ?eps:float -> mat -> elt -> bool
val approx_elt_equal : ?eps:float -> mat -> mat -> mat
val approx_elt_equal_scalar : ?eps:float -> mat -> elt -> mat
Randomisation functions
val draw_rows : ?replacement:bool -> mat -> int -> mat * int array
val draw_cols : ?replacement:bool -> mat -> int -> mat * int array
val draw_rows2 : ?replacement:bool -> mat -> mat -> int -> mat * mat * int array
val draw_cols2 : ?replacement:bool -> mat -> mat -> int -> mat * mat * int array
val shuffle_rows : mat -> mat
val shuffle_cols : mat -> mat
val shuffle : mat -> mat
Input/Output and helper functions
val to_array : mat -> elt array
val of_array : elt array -> int -> int -> mat
val to_arrays : mat -> elt array array
val of_arrays : elt array array -> mat
val to_rows : mat -> mat array
val of_rows : mat array -> mat
val to_cols : mat -> mat array
val of_cols : mat array -> mat
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> mat -> unit
val save : out:string -> mat -> unit
val load : string -> mat
val save_txt : ?sep:string -> ?append:bool -> out:string -> mat -> unit
val load_txt : ?sep:string -> string -> mat
val save_npy : out:string -> mat -> unit
val load_npy : string -> mat
Unary mathematical operations
val min : ?axis:int -> ?keep_dims:bool -> mat -> mat
val min' : mat -> elt
val max : ?axis:int -> ?keep_dims:bool -> mat -> mat
val max' : mat -> elt
val minmax : ?axis:int -> ?keep_dims:bool -> mat -> mat * mat
val minmax' : mat -> elt * elt
val min_i : mat -> elt * int array
val max_i : mat -> elt * int array
val minmax_i : mat -> (elt * int array) * (elt * int array)
val trace : mat -> elt
val sum : ?axis:int -> ?keep_dims:bool -> mat -> mat
val sum' : mat -> elt
val prod : ?axis:int -> ?keep_dims:bool -> mat -> mat
val prod' : mat -> elt
val mean : ?axis:int -> ?keep_dims:bool -> mat -> mat
val mean' : mat -> elt
val var' : mat -> elt
val std' : mat -> elt
val sem : ?axis:int -> ?keep_dims:bool -> mat -> mat
val sem' : mat -> elt
val sum_rows : ?keep_dims:bool -> mat -> mat
val sum_cols : ?keep_dims:bool -> mat -> mat
val mean_rows : ?keep_dims:bool -> mat -> mat
val mean_cols : ?keep_dims:bool -> mat -> mat
val abs : mat -> mat
val abs2 : mat -> mat
val conj : mat -> mat
val neg : mat -> mat
val reci : mat -> mat
val reci_tol : ?tol:elt -> mat -> mat
val sqr : mat -> mat
val sqrt : mat -> mat
val cbrt : mat -> mat
val exp : mat -> mat
val exp2 : mat -> mat
val exp10 : mat -> mat
val expm1 : mat -> mat
val log : mat -> mat
val log10 : mat -> mat
val log2 : mat -> mat
val log1p : mat -> mat
val sin : mat -> mat
val cos : mat -> mat
val tan : mat -> mat
val asin : mat -> mat
val acos : mat -> mat
val atan : mat -> mat
val sinh : mat -> mat
val cosh : mat -> mat
val tanh : mat -> mat
val asinh : mat -> mat
val acosh : mat -> mat
val atanh : mat -> mat
val floor : mat -> mat
val ceil : mat -> mat
val round : mat -> mat
val trunc : mat -> mat
val fix : mat -> mat
val modf : mat -> mat * mat
val l1norm : ?axis:int -> ?keep_dims:bool -> mat -> mat
val l1norm' : mat -> elt
val l2norm : ?axis:int -> ?keep_dims:bool -> mat -> mat
val l2norm' : mat -> elt
val l2norm_sqr : ?axis:int -> ?keep_dims:bool -> mat -> mat
val l2norm_sqr' : mat -> elt
val vecnorm : ?axis:int -> ?p:float -> ?keep_dims:bool -> mat -> mat
val vecnorm' : ?p:float -> mat -> elt
val cumsum : ?axis:int -> mat -> mat
val cumprod : ?axis:int -> mat -> mat
val cummin : ?axis:int -> mat -> mat
val cummax : ?axis:int -> mat -> mat
val diff : ?axis:int -> ?n:int -> mat -> mat
val var : ?axis:int -> ?keep_dims:bool -> mat -> mat
val std : ?axis:int -> ?keep_dims:bool -> mat -> mat
val mat2gray : ?amin:elt -> ?amax:elt -> mat -> mat
val lgamma : mat -> mat
val dawsn : mat -> mat
Binary mathematical operations
val add : mat -> mat -> mat
val sub : mat -> mat -> mat
val mul : mat -> mat -> mat
val div : mat -> mat -> mat
val add_scalar : mat -> elt -> mat
val sub_scalar : mat -> elt -> mat
val mul_scalar : mat -> elt -> mat
val div_scalar : mat -> elt -> mat
val scalar_add : elt -> mat -> mat
val scalar_sub : elt -> mat -> mat
val scalar_mul : elt -> mat -> mat
val scalar_div : elt -> mat -> mat
val dot : mat -> mat -> mat
val add_diag : mat -> elt -> mat
val pow : mat -> mat -> mat
val scalar_pow : elt -> mat -> mat
val pow_scalar : mat -> elt -> mat
val min2 : mat -> mat -> mat
val max2 : mat -> mat -> mat
val ssqr' : mat -> elt -> elt
val ssqr_diff' : mat -> mat -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> mat -> mat
val cov : ?b:mat -> a:mat -> mat
val kron : mat -> mat -> mat
val fma : mat -> mat -> mat -> mat
Functions of in-place modification
val create_ : out:mat -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:mat -> unit
val bernoulli_ : ?p:float -> out:mat -> unit
val zeros_ : out:mat -> unit
val ones_ : out:mat -> unit
val sort_ : mat -> unit
val one_hot_ : out:mat -> int -> mat -> unit
val copy_ : out:mat -> mat -> unit
val reshape_ : out:mat -> mat -> unit
val transpose_ : out:mat -> ?axis:int array -> mat -> unit
val sum_ : out:mat -> axis:int -> mat -> unit
val min_ : out:mat -> axis:int -> mat -> unit
val max_ : out:mat -> axis:int -> mat -> unit
val add_ : ?out:mat -> mat -> mat -> unit
val sub_ : ?out:mat -> mat -> mat -> unit
val mul_ : ?out:mat -> mat -> mat -> unit
val div_ : ?out:mat -> mat -> mat -> unit
val pow_ : ?out:mat -> mat -> mat -> unit
val atan2_ : ?out:mat -> mat -> mat -> unit
val hypot_ : ?out:mat -> mat -> mat -> unit
val fmod_ : ?out:mat -> mat -> mat -> unit
val min2_ : ?out:mat -> mat -> mat -> unit
val max2_ : ?out:mat -> mat -> mat -> unit
val add_scalar_ : ?out:mat -> mat -> elt -> unit
val sub_scalar_ : ?out:mat -> mat -> elt -> unit
val mul_scalar_ : ?out:mat -> mat -> elt -> unit
val div_scalar_ : ?out:mat -> mat -> elt -> unit
val pow_scalar_ : ?out:mat -> mat -> elt -> unit
val atan2_scalar_ : ?out:mat -> mat -> elt -> unit
val fmod_scalar_ : ?out:mat -> mat -> elt -> unit
val scalar_add_ : ?out:mat -> elt -> mat -> unit
val scalar_sub_ : ?out:mat -> elt -> mat -> unit
val scalar_mul_ : ?out:mat -> elt -> mat -> unit
val scalar_div_ : ?out:mat -> elt -> mat -> unit
val scalar_pow_ : ?out:mat -> elt -> mat -> unit
val scalar_atan2_ : ?out:mat -> elt -> mat -> unit
val scalar_fmod_ : ?out:mat -> elt -> mat -> unit
val fma_ : ?out:mat -> mat -> mat -> mat -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:mat -> mat -> mat -> unit
val conj_ : ?out:mat -> mat -> unit
val abs_ : ?out:mat -> mat -> unit
val neg_ : ?out:mat -> mat -> unit
val reci_ : ?out:mat -> mat -> unit
val signum_ : ?out:mat -> mat -> unit
val sqr_ : ?out:mat -> mat -> unit
val sqrt_ : ?out:mat -> mat -> unit
val cbrt_ : ?out:mat -> mat -> unit
val exp_ : ?out:mat -> mat -> unit
val exp2_ : ?out:mat -> mat -> unit
val exp10_ : ?out:mat -> mat -> unit
val expm1_ : ?out:mat -> mat -> unit
val log_ : ?out:mat -> mat -> unit
val log2_ : ?out:mat -> mat -> unit
val log10_ : ?out:mat -> mat -> unit
val log1p_ : ?out:mat -> mat -> unit
val sin_ : ?out:mat -> mat -> unit
val cos_ : ?out:mat -> mat -> unit
val tan_ : ?out:mat -> mat -> unit
val asin_ : ?out:mat -> mat -> unit
val acos_ : ?out:mat -> mat -> unit
val atan_ : ?out:mat -> mat -> unit
val sinh_ : ?out:mat -> mat -> unit
val cosh_ : ?out:mat -> mat -> unit
val tanh_ : ?out:mat -> mat -> unit
val asinh_ : ?out:mat -> mat -> unit
val acosh_ : ?out:mat -> mat -> unit
val atanh_ : ?out:mat -> mat -> unit
val floor_ : ?out:mat -> mat -> unit
val ceil_ : ?out:mat -> mat -> unit
val round_ : ?out:mat -> mat -> unit
val trunc_ : ?out:mat -> mat -> unit
val fix_ : ?out:mat -> mat -> unit
val erf_ : ?out:mat -> mat -> unit
val erfc_ : ?out:mat -> mat -> unit
val relu_ : ?out:mat -> mat -> unit
val softplus_ : ?out:mat -> mat -> unit
val softsign_ : ?out:mat -> mat -> unit
val sigmoid_ : ?out:mat -> mat -> unit
val softmax_ : ?out:mat -> ?axis:int -> mat -> unit
val cumsum_ : ?out:mat -> ?axis:int -> mat -> unit
val cumprod_ : ?out:mat -> ?axis:int -> mat -> unit
val cummin_ : ?out:mat -> ?axis:int -> mat -> unit
val cummax_ : ?out:mat -> ?axis:int -> mat -> unit
val dropout_ : ?out:mat -> ?rate:float -> mat -> unit
val elt_equal_ : ?out:mat -> mat -> mat -> unit
val elt_not_equal_ : ?out:mat -> mat -> mat -> unit
val elt_less_ : ?out:mat -> mat -> mat -> unit
val elt_greater_ : ?out:mat -> mat -> mat -> unit
val elt_less_equal_ : ?out:mat -> mat -> mat -> unit
val elt_greater_equal_ : ?out:mat -> mat -> mat -> unit
val elt_equal_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_not_equal_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_less_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_greater_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_less_equal_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_greater_equal_scalar_ : ?out:mat -> mat -> elt -> unit
include Owl_dense_matrix_intf.Real with type elt := elt and type mat := mat
Specific real functions
val i0 : mat -> mat
val i0e : mat -> mat
val i1 : mat -> mat
val i1e : mat -> mat
val iv : v:mat -> mat -> mat
val scalar_iv : v:elt -> mat -> mat
val iv_scalar : v:mat -> elt -> mat
val j0 : mat -> mat
val j1 : mat -> mat
val jv : v:mat -> mat -> mat
val scalar_jv : v:elt -> mat -> mat
val jv_scalar : v:mat -> elt -> mat
val semidef : int -> mat
val min_rows : mat -> (elt * int * int) array
val min_cols : mat -> (elt * int * int) array
val max_rows : mat -> (elt * int * int) array
val max_cols : mat -> (elt * int * int) array
val signum : mat -> mat
val erf : mat -> mat
val erfc : mat -> mat
val logistic : mat -> mat
val relu : mat -> mat
val elu : ?alpha:elt -> mat -> mat
val leaky_relu : ?alpha:elt -> mat -> mat
val softplus : mat -> mat
val softsign : mat -> mat
val softmax : ?axis:int -> mat -> mat
val sigmoid : mat -> mat
val log_sum_exp' : mat -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> mat -> mat
val max_pool : ?padding:Owl_types.padding -> mat -> int array -> int array -> mat
val avg_pool : ?padding:Owl_types.padding -> mat -> int array -> int array -> mat
val atan2 : mat -> mat -> mat
val scalar_atan2 : elt -> mat -> mat
val atan2_scalar : mat -> elt -> mat
val hypot : mat -> mat -> mat
val fmod : mat -> mat -> mat
val fmod_scalar : mat -> elt -> mat
val scalar_fmod : elt -> mat -> mat
val cross_entropy' : mat -> mat -> elt
val clip_by_l2norm : elt -> mat -> mat
val poisson : mu:elt -> int -> int -> mat
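For instance, a short sketch of the real-only helpers above on single-precision matrices (module path assumed as before):

    module M = Owl.Dense.Matrix.S
    let x = M.gaussian 4 4
    let y = M.relu x                (* element-wise max(0, .) *)
    let z = M.softmax ~axis:1 x     (* row-wise softmax *)
    let p = M.poisson ~mu:3.0 2 2   (* 2 x 2 matrix of Poisson(3) samples *)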
include module type of struct include Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (-$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (*$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (/$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val ($+) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($-) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($*) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($/) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (!=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<>) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (>) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (>=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
include sig ... end
val (=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (!=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<>$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (>$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (>=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (!=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<>.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (>.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (>=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (!=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<>.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (>.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (>=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (=~) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (=~$) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (=~.) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=~.$) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (%) : (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val (%$) : (float, 'a) Owl_dense_matrix_generic.t -> float -> (float, 'a) Owl_dense_matrix_generic.t
val (**) : (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val ($**) : float -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val (**$) : (float, 'a) Owl_dense_matrix_generic.t -> float -> (float, 'a) Owl_dense_matrix_generic.t
val (+=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (-=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (*=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (/=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (+$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (-$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (*$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (/$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (@=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (@||) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (.!{;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_matrix_generic.t
val (.!{;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (.${}) : ('a, 'b) Owl_dense_matrix_generic.t -> int list -> ('a, 'b) Owl_dense_matrix_generic.t
val (.${;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> int list array -> ('a, 'b) Owl_dense_matrix_generic.t
val (.${}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int list -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (.${;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int list array -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
include sig ... end
val (*@) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (.%{}) : ('a, 'b) Owl_dense_matrix_generic.t -> (int * int) -> 'a
val (.%{;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> (int * int) -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int array -> 'a -> unit
include sig ... end
val (**@) : ('a, 'b) Owl_linalg_generic.t -> float -> ('a, 'b) Owl_linalg_generic.t
val (/@) : ('a, 'b) Owl_linalg_generic.t -> ('a, 'b) Owl_linalg_generic.t -> ('a, 'b) Owl_linalg_generic.t
val mpow : Owl_linalg_s.mat -> float -> Owl_linalg_s.mat
val diag : ?k:int -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
\ No newline at end of file
diff --git a/owl/Owl_dense_matrix/Z/index.html b/owl/Owl_dense_matrix/Z/index.html
deleted file mode 100644
index 3d0c69521..000000000
--- a/owl/Owl_dense_matrix/Z/index.html
+++ /dev/null
@@ -1,215 +0,0 @@
Z (owl.Owl_dense_matrix.Z)

Module Owl_dense_matrix.Z

include module type of struct include Owl_dense_matrix_z end
type elt = Stdlib.Complex.t
type mat = (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) Owl_dense_matrix_generic.t
type cast_mat = (float, Stdlib.Bigarray.float64_elt) Owl_dense_matrix_generic.t
include Owl_dense_matrix_intf.Common with type elt := elt and type mat := mat
Create dense matrices
val empty : int -> int -> mat
val create : int -> int -> elt -> mat
val init : int -> int -> (int -> elt) -> mat
val init_2d : int -> int -> (int -> int -> elt) -> mat
val zeros : int -> int -> mat
val ones : int -> int -> mat
val eye : int -> mat
val sequential : ?a:elt -> ?step:elt -> int -> int -> mat
val uniform : ?a:elt -> ?b:elt -> int -> int -> mat
val gaussian : ?mu:elt -> ?sigma:elt -> int -> int -> mat
val bernoulli : ?p:float -> int -> int -> mat
val unit_basis : int -> int -> mat
val diagm : ?k:int -> mat -> mat
val triu : ?k:int -> mat -> mat
val tril : ?k:int -> mat -> mat
val symmetric : ?upper:bool -> mat -> mat
val bidiagonal : ?upper:bool -> mat -> mat -> mat
val toeplitz : ?c:mat -> mat -> mat
val hankel : ?r:mat -> mat -> mat
val hadamard : int -> mat
val magic : int -> mat
Dense row vectors and meshgrids
val vector : int -> mat
val vector_zeros : int -> mat
val vector_ones : int -> mat
val vector_uniform : int -> mat
val linspace : elt -> elt -> int -> mat
val logspace : ?base:float -> elt -> elt -> int -> mat
val meshgrid : elt -> elt -> elt -> elt -> int -> int -> mat * mat
val meshup : mat -> mat -> mat * mat
Obtain the basic properties of a matrix
val shape : mat -> int * int
val row_num : mat -> int
val col_num : mat -> int
val numel : mat -> int
val nnz : mat -> int
val density : mat -> float
val size_in_bytes : mat -> int
val same_shape : mat -> mat -> bool
val same_data : mat -> mat -> bool
Manipulate a matrix
val get : mat -> int -> int -> elt
val set : mat -> int -> int -> elt -> unit
val get_index : mat -> int array array -> elt array
val set_index : mat -> int array array -> elt array -> unit
val get_fancy : Owl_types.index list -> mat -> mat
val set_fancy : Owl_types.index list -> mat -> mat -> unit
val get_slice : int list list -> mat -> mat
val set_slice : int list list -> mat -> mat -> unit
val row : mat -> int -> mat
val col : mat -> int -> mat
val rows : mat -> int array -> mat
val cols : mat -> int array -> mat
val resize : ?head:bool -> mat -> int array -> mat
val reshape : mat -> int array -> mat
val flatten : mat -> mat
val reverse : mat -> mat
val flip : ?axis:int -> mat -> mat
val rotate : mat -> int -> mat
val reset : mat -> unit
val fill : mat -> elt -> unit
val copy : mat -> mat
val copy_row_to : mat -> mat -> int -> unit
val copy_col_to : mat -> mat -> int -> unit
val concat_vertical : mat -> mat -> mat
val concat_horizontal : mat -> mat -> mat
val concat_vh : mat array array -> mat
val concatenate : ?axis:int -> mat array -> mat
val split : ?axis:int -> int array -> mat -> mat array
val split_vh : (int * int) array array -> mat -> mat array array
val transpose : mat -> mat
val ctranspose : mat -> mat
val diag : ?k:int -> mat -> mat
val swap_rows : mat -> int -> int -> unit
val swap_cols : mat -> int -> int -> unit
val tile : mat -> int array -> mat
val repeat : mat -> int array -> mat
val pad : ?v:elt -> int list list -> mat -> mat
val dropout : ?rate:float -> mat -> mat
val top : mat -> int -> int array array
val bottom : mat -> int -> int array array
val sort : mat -> mat
val argsort : mat -> (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
Iterate elements, columns, and rows.
val iteri : (int -> elt -> unit) -> mat -> unit
val iter : (elt -> unit) -> mat -> unit
val mapi : (int -> elt -> elt) -> mat -> mat
val map : (elt -> elt) -> mat -> mat
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> mat -> mat
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> mat -> mat
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> mat -> mat
val scan : ?axis:int -> (elt -> elt -> elt) -> mat -> mat
val filteri : (int -> elt -> bool) -> mat -> int array
val filter : (elt -> bool) -> mat -> int array
val iteri_2d : (int -> int -> elt -> unit) -> mat -> unit
val mapi_2d : (int -> int -> elt -> elt) -> mat -> mat
val foldi_2d : ?axis:int -> (int -> int -> elt -> elt -> elt) -> elt -> mat -> mat
val scani_2d : ?axis:int -> (int -> int -> elt -> elt -> elt) -> mat -> mat
val filteri_2d : (int -> int -> elt -> bool) -> mat -> (int * int) array
val iter2i_2d : (int -> int -> elt -> elt -> unit) -> mat -> mat -> unit
val map2i_2d : (int -> int -> elt -> elt -> elt) -> mat -> mat -> mat
val iter2i : (int -> elt -> elt -> unit) -> mat -> mat -> unit
val iter2 : (elt -> elt -> unit) -> mat -> mat -> unit
val map2i : (int -> elt -> elt -> elt) -> mat -> mat -> mat
val map2 : (elt -> elt -> elt) -> mat -> mat -> mat
val iteri_rows : (int -> mat -> unit) -> mat -> unit
val iter_rows : (mat -> unit) -> mat -> unit
val iter2i_rows : (int -> mat -> mat -> unit) -> mat -> mat -> unit
val iter2_rows : (mat -> mat -> unit) -> mat -> mat -> unit
val iteri_cols : (int -> mat -> unit) -> mat -> unit
val iter_cols : (mat -> unit) -> mat -> unit
val filteri_rows : (int -> mat -> bool) -> mat -> int array
val filter_rows : (mat -> bool) -> mat -> int array
val filteri_cols : (int -> mat -> bool) -> mat -> int array
val filter_cols : (mat -> bool) -> mat -> int array
val fold_rows : ('a -> mat -> 'a) -> 'a -> mat -> 'a
val fold_cols : ('a -> mat -> 'a) -> 'a -> mat -> 'a
val mapi_rows : (int -> mat -> 'a) -> mat -> 'a array
val map_rows : (mat -> 'a) -> mat -> 'a array
val mapi_cols : (int -> mat -> 'a) -> mat -> 'a array
val map_cols : (mat -> 'a) -> mat -> 'a array
val mapi_by_row : int -> (int -> mat -> mat) -> mat -> mat
val map_by_row : int -> (mat -> mat) -> mat -> mat
val mapi_by_col : int -> (int -> mat -> mat) -> mat -> mat
val map_by_col : int -> (mat -> mat) -> mat -> mat
val mapi_at_row : (int -> elt -> elt) -> mat -> int -> mat
val map_at_row : (elt -> elt) -> mat -> int -> mat
val mapi_at_col : (int -> elt -> elt) -> mat -> int -> mat
val map_at_col : (elt -> elt) -> mat -> int -> mat
Examine elements and compare two matrices
val exists : (elt -> bool) -> mat -> bool
val not_exists : (elt -> bool) -> mat -> bool
val for_all : (elt -> bool) -> mat -> bool
val is_zero : mat -> bool
val is_positive : mat -> bool
val is_negative : mat -> bool
val is_nonpositive : mat -> bool
val is_nonnegative : mat -> bool
val is_normal : mat -> bool
val not_nan : mat -> bool
val not_inf : mat -> bool
val equal : mat -> mat -> bool
val not_equal : mat -> mat -> bool
val greater : mat -> mat -> bool
val less : mat -> mat -> bool
val greater_equal : mat -> mat -> bool
val less_equal : mat -> mat -> bool
val elt_equal : mat -> mat -> mat
val elt_not_equal : mat -> mat -> mat
val elt_less : mat -> mat -> mat
val elt_greater : mat -> mat -> mat
val elt_less_equal : mat -> mat -> mat
val elt_greater_equal : mat -> mat -> mat
val equal_scalar : mat -> elt -> bool
val not_equal_scalar : mat -> elt -> bool
val less_scalar : mat -> elt -> bool
val greater_scalar : mat -> elt -> bool
val less_equal_scalar : mat -> elt -> bool
val greater_equal_scalar : mat -> elt -> bool
val elt_equal_scalar : mat -> elt -> mat
val elt_not_equal_scalar : mat -> elt -> mat
val elt_less_scalar : mat -> elt -> mat
val elt_greater_scalar : mat -> elt -> mat
val elt_less_equal_scalar : mat -> elt -> mat
val elt_greater_equal_scalar : mat -> elt -> mat
val approx_equal : ?eps:float -> mat -> mat -> bool
val approx_equal_scalar : ?eps:float -> mat -> elt -> bool
val approx_elt_equal : ?eps:float -> mat -> mat -> mat
val approx_elt_equal_scalar : ?eps:float -> mat -> elt -> mat
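In this comparison group the whole-matrix predicates return a bool, while the elt_* variants return a 0/1 mask of the same shape. A minimal sketch, again using Owl.Mat as a stand-in for any instance of this interface:

let () =
  let x = Owl.Mat.sequential 2 3 in
  assert (Owl.Mat.equal x (Owl.Mat.copy x));
  assert (Owl.Mat.approx_equal ~eps:1e-9 x x);
  (* mask of entries strictly greater than 2, then count them *)
  let mask = Owl.Mat.elt_greater_scalar x 2. in
  Printf.printf "entries > 2: %g\n" (Owl.Mat.sum' mask)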
Randomisation functions
val draw_rows : ?replacement:bool -> mat -> int -> mat * int array
val draw_cols : ?replacement:bool -> mat -> int -> mat * int array
val draw_rows2 : ?replacement:bool -> mat -> mat -> int -> mat * mat * int array
val draw_cols2 : ?replacement:bool -> mat -> mat -> int -> mat * mat * int array
val shuffle_rows : mat -> mat
val shuffle_cols : mat -> mat
val shuffle : mat -> mat
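The draw_* functions sample whole rows or columns and also return the sampled indices; a small sketch with Owl.Mat (an assumption, any module with this interface behaves the same):

let () =
  let x = Owl.Mat.uniform 10 4 in
  let sample, idx = Owl.Mat.draw_rows ~replacement:false x 3 in
  Array.iter (Printf.printf "drew row %d\n") idx;
  ignore (Owl.Mat.shuffle_rows sample)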
Input/Output and helper functions
val to_array : mat -> elt array
val of_array : elt array -> int -> int -> mat
val to_arrays : mat -> elt array array
val of_arrays : elt array array -> mat
val to_rows : mat -> mat array
val of_rows : mat array -> mat
val to_cols : mat -> mat array
val of_cols : mat array -> mat
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> mat -> unit
val save : out:string -> mat -> unit
val load : string -> mat
val save_txt : ?sep:string -> ?append:bool -> out:string -> mat -> unit
val load_txt : ?sep:string -> string -> mat
val save_npy : out:string -> mat -> unit
val load_npy : string -> mat
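Text and binary round trips look like the following sketch (the file names are arbitrary); Owl.Mat again stands in for any module exposing this interface:

let () =
  let x = Owl.Mat.gaussian 3 3 in
  Owl.Mat.save_txt ~sep:"," ~out:"x.csv" x;
  let y = Owl.Mat.load_txt ~sep:"," "x.csv" in
  Printf.printf "text round trip ok: %b\n" (Owl.Mat.approx_equal ~eps:1e-6 x y);
  Owl.Mat.save ~out:"x.bin" x;
  ignore (Owl.Mat.load "x.bin")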
Unary mathematical operations
val min : ?axis:int -> ?keep_dims:bool -> mat -> mat
val min' : mat -> elt
val max : ?axis:int -> ?keep_dims:bool -> mat -> mat
val max' : mat -> elt
val minmax : ?axis:int -> ?keep_dims:bool -> mat -> mat * mat
val minmax' : mat -> elt * elt
val min_i : mat -> elt * int array
val max_i : mat -> elt * int array
val minmax_i : mat -> (elt * int array) * (elt * int array)
val trace : mat -> elt
val sum : ?axis:int -> ?keep_dims:bool -> mat -> mat
val sum' : mat -> elt
val prod : ?axis:int -> ?keep_dims:bool -> mat -> mat
val prod' : mat -> elt
val mean : ?axis:int -> ?keep_dims:bool -> mat -> mat
val mean' : mat -> elt
val var' : mat -> elt
val std' : mat -> elt
val sem : ?axis:int -> ?keep_dims:bool -> mat -> mat
val sem' : mat -> elt
val sum_rows : ?keep_dims:bool -> mat -> mat
val sum_cols : ?keep_dims:bool -> mat -> mat
val mean_rows : ?keep_dims:bool -> mat -> mat
val mean_cols : ?keep_dims:bool -> mat -> mat
val abs : mat -> mat
val abs2 : mat -> mat
val conj : mat -> mat
val neg : mat -> mat
val reci : mat -> mat
val reci_tol : ?tol:elt -> mat -> mat
val sqr : mat -> mat
val sqrt : mat -> mat
val cbrt : mat -> mat
val exp : mat -> mat
val exp2 : mat -> mat
val exp10 : mat -> mat
val expm1 : mat -> mat
val log : mat -> mat
val log10 : mat -> mat
val log2 : mat -> mat
val log1p : mat -> mat
val sin : mat -> mat
val cos : mat -> mat
val tan : mat -> mat
val asin : mat -> mat
val acos : mat -> mat
val atan : mat -> mat
val sinh : mat -> mat
val cosh : mat -> mat
val tanh : mat -> mat
val asinh : mat -> mat
val acosh : mat -> mat
val atanh : mat -> mat
val floor : mat -> mat
val ceil : mat -> mat
val round : mat -> mat
val trunc : mat -> mat
val fix : mat -> mat
val modf : mat -> mat * mat
val l1norm : ?axis:int -> ?keep_dims:bool -> mat -> mat
val l1norm' : mat -> elt
val l2norm : ?axis:int -> ?keep_dims:bool -> mat -> mat
val l2norm' : mat -> elt
val l2norm_sqr : ?axis:int -> ?keep_dims:bool -> mat -> mat
val l2norm_sqr' : mat -> elt
val vecnorm : ?axis:int -> ?p:float -> ?keep_dims:bool -> mat -> mat
val vecnorm' : ?p:float -> mat -> elt
val cumsum : ?axis:int -> mat -> mat
val cumprod : ?axis:int -> mat -> mat
val cummin : ?axis:int -> mat -> mat
val cummax : ?axis:int -> mat -> mat
val diff : ?axis:int -> ?n:int -> mat -> mat
val var : ?axis:int -> ?keep_dims:bool -> mat -> mat
val std : ?axis:int -> ?keep_dims:bool -> mat -> mat
val mat2gray : ?amin:elt -> ?amax:elt -> mat -> mat
val lgamma : mat -> mat
val dawsn : mat -> mat
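The primed reductions (sum', mean', ...) collapse the whole matrix to a scalar, while the unprimed versions reduce along ?axis and return a matrix. For instance, with Owl.Mat (assumed as before):

let () =
  let x = Owl.Mat.uniform 3 4 in
  let col_sums = Owl.Mat.sum ~axis:0 x in
  Printf.printf "total=%g mean=%g std=%g, col_sums is %dx%d\n"
    (Owl.Mat.sum' x) (Owl.Mat.mean' x) (Owl.Mat.std' x)
    (Owl.Mat.row_num col_sums) (Owl.Mat.col_num col_sums)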
Binary mathematical operations
val add : mat -> mat -> mat
val sub : mat -> mat -> mat
val mul : mat -> mat -> mat
val div : mat -> mat -> mat
val add_scalar : mat -> elt -> mat
val sub_scalar : mat -> elt -> mat
val mul_scalar : mat -> elt -> mat
val div_scalar : mat -> elt -> mat
val scalar_add : elt -> mat -> mat
val scalar_sub : elt -> mat -> mat
val scalar_mul : elt -> mat -> mat
val scalar_div : elt -> mat -> mat
val dot : mat -> mat -> mat
val add_diag : mat -> elt -> mat
val pow : mat -> mat -> mat
val scalar_pow : elt -> mat -> mat
val pow_scalar : mat -> elt -> mat
val min2 : mat -> mat -> mat
val max2 : mat -> mat -> mat
val ssqr' : mat -> elt -> elt
val ssqr_diff' : mat -> mat -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> mat -> mat
val cov : ?b:mat -> a:mat -> mat
val kron : mat -> mat -> mat
val fma : mat -> mat -> mat -> mat
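A short sketch combining element-wise and matrix operations from this group, again using Owl.Mat as a stand-in:

let () =
  let a = Owl.Mat.uniform 2 3 in
  let b = Owl.Mat.uniform 3 2 in
  let c = Owl.Mat.dot a b in                        (* 2 x 2 matrix product *)
  let d = Owl.Mat.add_scalar (Owl.Mat.mul c c) 1. in
  Printf.printf "trace = %g\n" (Owl.Mat.trace d)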
Functions of in-place modification
val create_ : out:mat -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:mat -> unit
val bernoulli_ : ?p:float -> out:mat -> unit
val zeros_ : out:mat -> unit
val ones_ : out:mat -> unit
val sort_ : mat -> unit
val one_hot_ : out:mat -> int -> mat -> unit
val copy_ : out:mat -> mat -> unit
val reshape_ : out:mat -> mat -> unit
val transpose_ : out:mat -> ?axis:int array -> mat -> unit
val sum_ : out:mat -> axis:int -> mat -> unit
val min_ : out:mat -> axis:int -> mat -> unit
val max_ : out:mat -> axis:int -> mat -> unit
val add_ : ?out:mat -> mat -> mat -> unit
val sub_ : ?out:mat -> mat -> mat -> unit
val mul_ : ?out:mat -> mat -> mat -> unit
val div_ : ?out:mat -> mat -> mat -> unit
val pow_ : ?out:mat -> mat -> mat -> unit
val atan2_ : ?out:mat -> mat -> mat -> unit
val hypot_ : ?out:mat -> mat -> mat -> unit
val fmod_ : ?out:mat -> mat -> mat -> unit
val min2_ : ?out:mat -> mat -> mat -> unit
val max2_ : ?out:mat -> mat -> mat -> unit
val add_scalar_ : ?out:mat -> mat -> elt -> unit
val sub_scalar_ : ?out:mat -> mat -> elt -> unit
val mul_scalar_ : ?out:mat -> mat -> elt -> unit
val div_scalar_ : ?out:mat -> mat -> elt -> unit
val pow_scalar_ : ?out:mat -> mat -> elt -> unit
val atan2_scalar_ : ?out:mat -> mat -> elt -> unit
val fmod_scalar_ : ?out:mat -> mat -> elt -> unit
val scalar_add_ : ?out:mat -> elt -> mat -> unit
val scalar_sub_ : ?out:mat -> elt -> mat -> unit
val scalar_mul_ : ?out:mat -> elt -> mat -> unit
val scalar_div_ : ?out:mat -> elt -> mat -> unit
val scalar_pow_ : ?out:mat -> elt -> mat -> unit
val scalar_atan2_ : ?out:mat -> elt -> mat -> unit
val scalar_fmod_ : ?out:mat -> elt -> mat -> unit
val fma_ : ?out:mat -> mat -> mat -> mat -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:mat -> mat -> mat -> unit
val conj_ : ?out:mat -> mat -> unit
val abs_ : ?out:mat -> mat -> unit
val neg_ : ?out:mat -> mat -> unit
val reci_ : ?out:mat -> mat -> unit
val signum_ : ?out:mat -> mat -> unit
val sqr_ : ?out:mat -> mat -> unit
val sqrt_ : ?out:mat -> mat -> unit
val cbrt_ : ?out:mat -> mat -> unit
val exp_ : ?out:mat -> mat -> unit
val exp2_ : ?out:mat -> mat -> unit
val exp10_ : ?out:mat -> mat -> unit
val expm1_ : ?out:mat -> mat -> unit
val log_ : ?out:mat -> mat -> unit
val log2_ : ?out:mat -> mat -> unit
val log10_ : ?out:mat -> mat -> unit
val log1p_ : ?out:mat -> mat -> unit
val sin_ : ?out:mat -> mat -> unit
val cos_ : ?out:mat -> mat -> unit
val tan_ : ?out:mat -> mat -> unit
val asin_ : ?out:mat -> mat -> unit
val acos_ : ?out:mat -> mat -> unit
val atan_ : ?out:mat -> mat -> unit
val sinh_ : ?out:mat -> mat -> unit
val cosh_ : ?out:mat -> mat -> unit
val tanh_ : ?out:mat -> mat -> unit
val asinh_ : ?out:mat -> mat -> unit
val acosh_ : ?out:mat -> mat -> unit
val atanh_ : ?out:mat -> mat -> unit
val floor_ : ?out:mat -> mat -> unit
val ceil_ : ?out:mat -> mat -> unit
val round_ : ?out:mat -> mat -> unit
val trunc_ : ?out:mat -> mat -> unit
val fix_ : ?out:mat -> mat -> unit
val erf_ : ?out:mat -> mat -> unit
val erfc_ : ?out:mat -> mat -> unit
val relu_ : ?out:mat -> mat -> unit
val softplus_ : ?out:mat -> mat -> unit
val softsign_ : ?out:mat -> mat -> unit
val sigmoid_ : ?out:mat -> mat -> unit
val softmax_ : ?out:mat -> ?axis:int -> mat -> unit
val cumsum_ : ?out:mat -> ?axis:int -> mat -> unit
val cumprod_ : ?out:mat -> ?axis:int -> mat -> unit
val cummin_ : ?out:mat -> ?axis:int -> mat -> unit
val cummax_ : ?out:mat -> ?axis:int -> mat -> unit
val dropout_ : ?out:mat -> ?rate:float -> mat -> unit
val elt_equal_ : ?out:mat -> mat -> mat -> unit
val elt_not_equal_ : ?out:mat -> mat -> mat -> unit
val elt_less_ : ?out:mat -> mat -> mat -> unit
val elt_greater_ : ?out:mat -> mat -> mat -> unit
val elt_less_equal_ : ?out:mat -> mat -> mat -> unit
val elt_greater_equal_ : ?out:mat -> mat -> mat -> unit
val elt_equal_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_not_equal_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_less_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_greater_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_less_equal_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_greater_equal_scalar_ : ?out:mat -> mat -> elt -> unit
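The trailing-underscore functions avoid allocating a result: with ~out they write into a pre-allocated buffer, and without it they overwrite the first matrix operand. A sketch with Owl.Mat, assumed as before:

let () =
  let x = Owl.Mat.uniform 4 4 in
  let y = Owl.Mat.uniform 4 4 in
  let out = Owl.Mat.zeros 4 4 in
  Owl.Mat.add_ ~out x y;       (* out <- x + y *)
  Owl.Mat.sqr_ out;            (* out <- out squared, element-wise *)
  Owl.Mat.add_scalar_ out 1.   (* out <- out + 1 *)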
include Owl_dense_matrix_intf.Complex with type mat := mat and type cast_mat := cast_mat
Specific complex functions
val complex : cast_mat -> cast_mat -> mat
val polar : cast_mat -> cast_mat -> mat
val re : mat -> cast_mat
val im : mat -> cast_mat
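These four functions convert between a complex matrix and a pair of real matrices of type cast_mat. A minimal sketch, assuming this is the double-precision complex module Owl.Dense.Matrix.Z, whose cast_mat is Owl.Dense.Matrix.D.mat:

let () =
  let open Owl.Dense.Matrix in
  let re = D.uniform 2 2 and im = D.uniform 2 2 in
  let z = Z.complex re im in          (* re + i*im, element-wise *)
  let _p = Z.polar re im in           (* magnitude/phase variant *)
  Printf.printf "real part recovered: %b\n" (D.approx_equal re (Z.re z))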
include module type of struct include Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (-$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (*$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (/$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val ($+) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($-) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($*) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val ($/) : 'a -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (!=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<>) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (>) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (>=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (<=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
include sig ... end
val (=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (!=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<>$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (>$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (<=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (>=$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (!=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<>.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (>.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (<=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (>=.) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (!=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<>.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (>.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (<=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (>=.$) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (=~) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> bool
val (=~$) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> bool
val (=~.) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (=~.$) : ?eps:float -> ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> ('a, 'b) Owl_dense_matrix_generic.t
val (%) : (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val (%$) : (float, 'a) Owl_dense_matrix_generic.t -> float -> (float, 'a) Owl_dense_matrix_generic.t
val (**) : (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val ($**) : float -> (float, 'a) Owl_dense_matrix_generic.t -> (float, 'a) Owl_dense_matrix_generic.t
val (**$) : (float, 'a) Owl_dense_matrix_generic.t -> float -> (float, 'a) Owl_dense_matrix_generic.t
val (+=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (-=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (*=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (/=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (+$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (-$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (*$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (/$=) : ('a, 'b) Owl_dense_matrix_generic.t -> 'a -> unit
val (@=) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (@||) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (.!{;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_matrix_generic.t
val (.!{;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (.${}) : ('a, 'b) Owl_dense_matrix_generic.t -> int list -> ('a, 'b) Owl_dense_matrix_generic.t
val (.${;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> int list array -> ('a, 'b) Owl_dense_matrix_generic.t
val (.${}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int list -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
val (.${;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int list array -> ('a, 'b) Owl_dense_matrix_generic.t -> unit
include sig ... end
val (*@) : ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t -> ('a, 'b) Owl_dense_matrix_generic.t
val (.%{}) : ('a, 'b) Owl_dense_matrix_generic.t -> (int * int) -> 'a
val (.%{;..}) : ('a, 'b) Owl_dense_matrix_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> (int * int) -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_dense_matrix_generic.t -> int array -> 'a -> unit
include sig ... end
val (**@) : ('a, 'b) Owl_linalg_generic.t -> float -> ('a, 'b) Owl_linalg_generic.t
val (/@) : ('a, 'b) Owl_linalg_generic.t -> ('a, 'b) Owl_linalg_generic.t -> ('a, 'b) Owl_linalg_generic.t
val mpow : Owl_linalg_z.mat -> float -> Owl_linalg_z.mat
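The included Operator module provides infix forms of the functions above. A brief sketch, again using the real Owl.Mat, which includes the same operators:

let () =
  let open Owl.Mat in
  let a = uniform 3 3 and b = uniform 3 3 in
  let c = a *@ b in                  (* matrix product, same as dot *)
  let d = (c +$ 1.) *$ 2. in         (* add a scalar, then scale *)
  Printf.printf "equal: %b  approx: %b\n" (d = d) (c =~ c)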
\ No newline at end of file
diff --git a/owl/Owl_dense_matrix_c/.dummy b/owl/Owl_dense_matrix_c/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_dense_matrix_d/.dummy b/owl/Owl_dense_matrix_d/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_dense_matrix_generic/.dummy b/owl/Owl_dense_matrix_generic/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_dense_matrix_intf/.dummy b/owl/Owl_dense_matrix_intf/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_dense_matrix_intf/module-type-Common/index.html b/owl/Owl_dense_matrix_intf/module-type-Common/index.html
deleted file mode 100644
index 2338f9ddd..000000000
--- a/owl/Owl_dense_matrix_intf/module-type-Common/index.html
+++ /dev/null
@@ -1,34 +0,0 @@
Common (owl.Owl_dense_matrix_intf.Common)

Module type Owl_dense_matrix_intf.Common

type elt
type mat
Create dense matrices
val empty : int -> int -> mat
val create : int -> int -> elt -> mat
val init : int -> int -> (int -> elt) -> mat
val init_2d : int -> int -> (int -> int -> elt) -> mat
val zeros : int -> int -> mat
val ones : int -> int -> mat
val eye : int -> mat
val sequential : ?a:elt -> ?step:elt -> int -> int -> mat
val uniform : ?a:elt -> ?b:elt -> int -> int -> mat
val gaussian : ?mu:elt -> ?sigma:elt -> int -> int -> mat
val bernoulli : ?p:float -> int -> int -> mat
val unit_basis : int -> int -> mat
val diagm : ?k:int -> mat -> mat
val triu : ?k:int -> mat -> mat
val tril : ?k:int -> mat -> mat
val symmetric : ?upper:bool -> mat -> mat
val bidiagonal : ?upper:bool -> mat -> mat -> mat
val toeplitz : ?c:mat -> mat -> mat
val hankel : ?r:mat -> mat -> mat
val hadamard : int -> mat
val magic : int -> mat
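A few of the constructors in this group, sketched with Owl.Mat (any instance of this interface behaves the same, only elt changes):

let () =
  let open Owl.Mat in
  let a = eye 3 in
  let b = gaussian ~mu:0. ~sigma:1. 3 3 in
  let c = bernoulli ~p:0.3 2 4 in      (* 0/1 entries, P(1) = 0.3 *)
  let d = tril b in                    (* zero out everything above the diagonal *)
  ignore (a, c, d)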
Dense row vectors and meshgrids
val vector : int -> mat
val vector_zeros : int -> mat
val vector_ones : int -> mat
val vector_uniform : int -> mat
val linspace : elt -> elt -> int -> mat
val logspace : ?base:float -> elt -> elt -> int -> mat
val meshgrid : elt -> elt -> elt -> elt -> int -> int -> mat * mat
val meshup : mat -> mat -> mat * mat
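linspace and meshgrid are convenient for building evaluation grids; a small sketch with Owl.Mat (assumed as before):

let () =
  let open Owl.Mat in
  let x = linspace 0. 1. 5 in                  (* 5 evenly spaced points *)
  let xv, yv = meshgrid 0. 1. 0. 2. 5 3 in     (* two coordinate grids *)
  ignore (x, xv, yv)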
Obtain the basic properties of a matrix
val shape : mat -> int * int
val row_num : mat -> int
val col_num : mat -> int
val numel : mat -> int
val nnz : mat -> int
val density : mat -> float
val size_in_bytes : mat -> int
val same_shape : mat -> mat -> bool
val same_data : mat -> mat -> bool
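The property accessors are cheap queries on an existing matrix, for example with Owl.Mat:

let () =
  let x = Owl.Mat.uniform 3 4 in
  let r, c = Owl.Mat.shape x in
  Printf.printf "shape %dx%d, numel %d, density %g, %d bytes\n"
    r c (Owl.Mat.numel x) (Owl.Mat.density x) (Owl.Mat.size_in_bytes x)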
Manipulate a matrix
val get : mat -> int -> int -> elt
val set : mat -> int -> int -> elt -> unit
val get_index : mat -> int array array -> elt array
val set_index : mat -> int array array -> elt array -> unit
val get_fancy : Owl_types.index list -> mat -> mat
val set_fancy : Owl_types.index list -> mat -> mat -> unit
val get_slice : int list list -> mat -> mat
val set_slice : int list list -> mat -> mat -> unit
val row : mat -> int -> mat
val col : mat -> int -> mat
val rows : mat -> int array -> mat
val cols : mat -> int array -> mat
val resize : ?head:bool -> mat -> int array -> mat
val reshape : mat -> int array -> mat
val flatten : mat -> mat
val reverse : mat -> mat
val flip : ?axis:int -> mat -> mat
val rotate : mat -> int -> mat
val reset : mat -> unit
val fill : mat -> elt -> unit
val copy : mat -> mat
val copy_row_to : mat -> mat -> int -> unit
val copy_col_to : mat -> mat -> int -> unit
val concat_vertical : mat -> mat -> mat
val concat_horizontal : mat -> mat -> mat
val concat_vh : mat array array -> mat
val concatenate : ?axis:int -> mat array -> mat
val split : ?axis:int -> int array -> mat -> mat array
val split_vh : (int * int) array array -> mat -> mat array array
val transpose : mat -> mat
val ctranspose : mat -> mat
val diag : ?k:int -> mat -> mat
val swap_rows : mat -> int -> int -> unit
val swap_cols : mat -> int -> int -> unit
val tile : mat -> int array -> mat
val repeat : mat -> int array -> mat
val pad : ?v:elt -> int list list -> mat -> mat
val dropout : ?rate:float -> mat -> mat
val top : mat -> int -> int array array
val bottom : mat -> int -> int array array
val sort : mat -> mat
val argsort : mat -> (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
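Slicing, concatenation and transposition from this group compose naturally; a sketch with Owl.Mat, where slices are written as [start;stop] lists and an empty list means the whole axis:

let () =
  let open Owl.Mat in
  let x = sequential 4 4 in
  let tl = get_slice [ [0;1]; [0;1] ] x in      (* rows 0..1, cols 0..1 *)
  set_slice [ [0]; [] ] x (zeros 1 4);          (* overwrite row 0 *)
  let y = concat_horizontal tl (transpose tl) in
  ignore (y, rows x [| 0; 2 |])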
Iterate elements, columns, and rows.
val iteri : (int -> elt -> unit) -> mat -> unit
val iter : (elt -> unit) -> mat -> unit
val mapi : (int -> elt -> elt) -> mat -> mat
val map : (elt -> elt) -> mat -> mat
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> mat -> mat
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> mat -> mat
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> mat -> mat
val scan : ?axis:int -> (elt -> elt -> elt) -> mat -> mat
val filteri : (int -> elt -> bool) -> mat -> int array
val filter : (elt -> bool) -> mat -> int array
val iteri_2d : (int -> int -> elt -> unit) -> mat -> unit
val mapi_2d : (int -> int -> elt -> elt) -> mat -> mat
val foldi_2d : ?axis:int -> (int -> int -> elt -> elt -> elt) -> elt -> mat -> mat
val scani_2d : ?axis:int -> (int -> int -> elt -> elt -> elt) -> mat -> mat
val filteri_2d : (int -> int -> elt -> bool) -> mat -> (int * int) array
val iter2i_2d : (int -> int -> elt -> elt -> unit) -> mat -> mat -> unit
val map2i_2d : (int -> int -> elt -> elt -> elt) -> mat -> mat -> mat
val iter2i : (int -> elt -> elt -> unit) -> mat -> mat -> unit
val iter2 : (elt -> elt -> unit) -> mat -> mat -> unit
val map2i : (int -> elt -> elt -> elt) -> mat -> mat -> mat
val map2 : (elt -> elt -> elt) -> mat -> mat -> mat
val iteri_rows : (int -> mat -> unit) -> mat -> unit
val iter_rows : (mat -> unit) -> mat -> unit
val iter2i_rows : (int -> mat -> mat -> unit) -> mat -> mat -> unit
val iter2_rows : (mat -> mat -> unit) -> mat -> mat -> unit
val iteri_cols : (int -> mat -> unit) -> mat -> unit
val iter_cols : (mat -> unit) -> mat -> unit
val filteri_rows : (int -> mat -> bool) -> mat -> int array
val filter_rows : (mat -> bool) -> mat -> int array
val filteri_cols : (int -> mat -> bool) -> mat -> int array
val filter_cols : (mat -> bool) -> mat -> int array
val fold_rows : ('a -> mat -> 'a) -> 'a -> mat -> 'a
val fold_cols : ('a -> mat -> 'a) -> 'a -> mat -> 'a
val mapi_rows : (int -> mat -> 'a) -> mat -> 'a array
val map_rows : (mat -> 'a) -> mat -> 'a array
val mapi_cols : (int -> mat -> 'a) -> mat -> 'a array
val map_cols : (mat -> 'a) -> mat -> 'a array
val mapi_by_row : int -> (int -> mat -> mat) -> mat -> mat
val map_by_row : int -> (mat -> mat) -> mat -> mat
val mapi_by_col : int -> (int -> mat -> mat) -> mat -> mat
val map_by_col : int -> (mat -> mat) -> mat -> mat
val mapi_at_row : (int -> elt -> elt) -> mat -> int -> mat
val map_at_row : (elt -> elt) -> mat -> int -> mat
val mapi_at_col : (int -> elt -> elt) -> mat -> int -> mat
val map_at_col : (elt -> elt) -> mat -> int -> mat
Examine elements and compare two matrices
val exists : (elt -> bool) -> mat -> bool
val not_exists : (elt -> bool) -> mat -> bool
val for_all : (elt -> bool) -> mat -> bool
val is_zero : mat -> bool
val is_positive : mat -> bool
val is_negative : mat -> bool
val is_nonpositive : mat -> bool
val is_nonnegative : mat -> bool
val is_normal : mat -> bool
val not_nan : mat -> bool
val not_inf : mat -> bool
val equal : mat -> mat -> bool
val not_equal : mat -> mat -> bool
val greater : mat -> mat -> bool
val less : mat -> mat -> bool
val greater_equal : mat -> mat -> bool
val less_equal : mat -> mat -> bool
val elt_equal : mat -> mat -> mat
val elt_not_equal : mat -> mat -> mat
val elt_less : mat -> mat -> mat
val elt_greater : mat -> mat -> mat
val elt_less_equal : mat -> mat -> mat
val elt_greater_equal : mat -> mat -> mat
val equal_scalar : mat -> elt -> bool
val not_equal_scalar : mat -> elt -> bool
val less_scalar : mat -> elt -> bool
val greater_scalar : mat -> elt -> bool
val less_equal_scalar : mat -> elt -> bool
val greater_equal_scalar : mat -> elt -> bool
val elt_equal_scalar : mat -> elt -> mat
val elt_not_equal_scalar : mat -> elt -> mat
val elt_less_scalar : mat -> elt -> mat
val elt_greater_scalar : mat -> elt -> mat
val elt_less_equal_scalar : mat -> elt -> mat
val elt_greater_equal_scalar : mat -> elt -> mat
val approx_equal : ?eps:float -> mat -> mat -> bool
val approx_equal_scalar : ?eps:float -> mat -> elt -> bool
val approx_elt_equal : ?eps:float -> mat -> mat -> mat
val approx_elt_equal_scalar : ?eps:float -> mat -> elt -> mat
Randomisation functions
val draw_rows : ?replacement:bool -> mat -> int -> mat * int array
val draw_cols : ?replacement:bool -> mat -> int -> mat * int array
val draw_rows2 : ?replacement:bool -> mat -> mat -> int -> mat * mat * int array
val draw_cols2 : ?replacement:bool -> mat -> mat -> int -> mat * mat * int array
val shuffle_rows : mat -> mat
val shuffle_cols : mat -> mat
val shuffle : mat -> mat
Input/Output and helper functions
val to_array : mat -> elt array
val of_array : elt array -> int -> int -> mat
val to_arrays : mat -> elt array array
val of_arrays : elt array array -> mat
val to_rows : mat -> mat array
val of_rows : mat array -> mat
val to_cols : mat -> mat array
val of_cols : mat array -> mat
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> mat -> unit
val save : out:string -> mat -> unit
val load : string -> mat
val save_txt : ?sep:string -> ?append:bool -> out:string -> mat -> unit
val load_txt : ?sep:string -> string -> mat
val save_npy : out:string -> mat -> unit
val load_npy : string -> mat
Unary mathematical operations
val min : ?axis:int -> ?keep_dims:bool -> mat -> mat
val min' : mat -> elt
val max : ?axis:int -> ?keep_dims:bool -> mat -> mat
val max' : mat -> elt
val minmax : ?axis:int -> ?keep_dims:bool -> mat -> mat * mat
val minmax' : mat -> elt * elt
val min_i : mat -> elt * int array
val max_i : mat -> elt * int array
val minmax_i : mat -> (elt * int array) * (elt * int array)
val trace : mat -> elt
val sum : ?axis:int -> ?keep_dims:bool -> mat -> mat
val sum' : mat -> elt
val prod : ?axis:int -> ?keep_dims:bool -> mat -> mat
val prod' : mat -> elt
val mean : ?axis:int -> ?keep_dims:bool -> mat -> mat
val mean' : mat -> elt
val var' : mat -> elt
val std' : mat -> elt
val sem : ?axis:int -> ?keep_dims:bool -> mat -> mat
val sem' : mat -> elt
val sum_rows : ?keep_dims:bool -> mat -> mat
val sum_cols : ?keep_dims:bool -> mat -> mat
val mean_rows : ?keep_dims:bool -> mat -> mat
val mean_cols : ?keep_dims:bool -> mat -> mat
val abs : mat -> mat
val abs2 : mat -> mat
val conj : mat -> mat
val neg : mat -> mat
val reci : mat -> mat
val reci_tol : ?tol:elt -> mat -> mat
val sqr : mat -> mat
val sqrt : mat -> mat
val cbrt : mat -> mat
val exp : mat -> mat
val exp2 : mat -> mat
val exp10 : mat -> mat
val expm1 : mat -> mat
val log : mat -> mat
val log10 : mat -> mat
val log2 : mat -> mat
val log1p : mat -> mat
val sin : mat -> mat
val cos : mat -> mat
val tan : mat -> mat
val asin : mat -> mat
val acos : mat -> mat
val atan : mat -> mat
val sinh : mat -> mat
val cosh : mat -> mat
val tanh : mat -> mat
val asinh : mat -> mat
val acosh : mat -> mat
val atanh : mat -> mat
val floor : mat -> mat
val ceil : mat -> mat
val round : mat -> mat
val trunc : mat -> mat
val fix : mat -> mat
val modf : mat -> mat * mat
val l1norm : ?axis:int -> ?keep_dims:bool -> mat -> mat
val l1norm' : mat -> elt
val l2norm : ?axis:int -> ?keep_dims:bool -> mat -> mat
val l2norm' : mat -> elt
val l2norm_sqr : ?axis:int -> ?keep_dims:bool -> mat -> mat
val l2norm_sqr' : mat -> elt
val vecnorm : ?axis:int -> ?p:float -> ?keep_dims:bool -> mat -> mat
val vecnorm' : ?p:float -> mat -> elt
val cumsum : ?axis:int -> mat -> mat
val cumprod : ?axis:int -> mat -> mat
val cummin : ?axis:int -> mat -> mat
val cummax : ?axis:int -> mat -> mat
val diff : ?axis:int -> ?n:int -> mat -> mat
val var : ?axis:int -> ?keep_dims:bool -> mat -> mat
val std : ?axis:int -> ?keep_dims:bool -> mat -> mat
val mat2gray : ?amin:elt -> ?amax:elt -> mat -> mat
val lgamma : mat -> mat
val dawsn : mat -> mat
Binary mathematical operations
val add : mat -> mat -> mat
val sub : mat -> mat -> mat
val mul : mat -> mat -> mat
val div : mat -> mat -> mat
val add_scalar : mat -> elt -> mat
val sub_scalar : mat -> elt -> mat
val mul_scalar : mat -> elt -> mat
val div_scalar : mat -> elt -> mat
val scalar_add : elt -> mat -> mat
val scalar_sub : elt -> mat -> mat
val scalar_mul : elt -> mat -> mat
val scalar_div : elt -> mat -> mat
val dot : mat -> mat -> mat
val add_diag : mat -> elt -> mat
val pow : mat -> mat -> mat
val scalar_pow : elt -> mat -> mat
val pow_scalar : mat -> elt -> mat
val min2 : mat -> mat -> mat
val max2 : mat -> mat -> mat
val ssqr' : mat -> elt -> elt
val ssqr_diff' : mat -> mat -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> mat -> mat
val cov : ?b:mat -> a:mat -> mat
val kron : mat -> mat -> mat
val fma : mat -> mat -> mat -> mat
Functions of in-place modification
val create_ : out:mat -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:mat -> unit
val bernoulli_ : ?p:float -> out:mat -> unit
val zeros_ : out:mat -> unit
val ones_ : out:mat -> unit
val sort_ : mat -> unit
val one_hot_ : out:mat -> int -> mat -> unit
val copy_ : out:mat -> mat -> unit
val reshape_ : out:mat -> mat -> unit
val transpose_ : out:mat -> ?axis:int array -> mat -> unit
val sum_ : out:mat -> axis:int -> mat -> unit
val min_ : out:mat -> axis:int -> mat -> unit
val max_ : out:mat -> axis:int -> mat -> unit
val add_ : ?out:mat -> mat -> mat -> unit
val sub_ : ?out:mat -> mat -> mat -> unit
val mul_ : ?out:mat -> mat -> mat -> unit
val div_ : ?out:mat -> mat -> mat -> unit
val pow_ : ?out:mat -> mat -> mat -> unit
val atan2_ : ?out:mat -> mat -> mat -> unit
val hypot_ : ?out:mat -> mat -> mat -> unit
val fmod_ : ?out:mat -> mat -> mat -> unit
val min2_ : ?out:mat -> mat -> mat -> unit
val max2_ : ?out:mat -> mat -> mat -> unit
val add_scalar_ : ?out:mat -> mat -> elt -> unit
val sub_scalar_ : ?out:mat -> mat -> elt -> unit
val mul_scalar_ : ?out:mat -> mat -> elt -> unit
val div_scalar_ : ?out:mat -> mat -> elt -> unit
val pow_scalar_ : ?out:mat -> mat -> elt -> unit
val atan2_scalar_ : ?out:mat -> mat -> elt -> unit
val fmod_scalar_ : ?out:mat -> mat -> elt -> unit
val scalar_add_ : ?out:mat -> elt -> mat -> unit
val scalar_sub_ : ?out:mat -> elt -> mat -> unit
val scalar_mul_ : ?out:mat -> elt -> mat -> unit
val scalar_div_ : ?out:mat -> elt -> mat -> unit
val scalar_pow_ : ?out:mat -> elt -> mat -> unit
val scalar_atan2_ : ?out:mat -> elt -> mat -> unit
val scalar_fmod_ : ?out:mat -> elt -> mat -> unit
val fma_ : ?out:mat -> mat -> mat -> mat -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:mat -> mat -> mat -> unit
val conj_ : ?out:mat -> mat -> unit
val abs_ : ?out:mat -> mat -> unit
val neg_ : ?out:mat -> mat -> unit
val reci_ : ?out:mat -> mat -> unit
val signum_ : ?out:mat -> mat -> unit
val sqr_ : ?out:mat -> mat -> unit
val sqrt_ : ?out:mat -> mat -> unit
val cbrt_ : ?out:mat -> mat -> unit
val exp_ : ?out:mat -> mat -> unit
val exp2_ : ?out:mat -> mat -> unit
val exp10_ : ?out:mat -> mat -> unit
val expm1_ : ?out:mat -> mat -> unit
val log_ : ?out:mat -> mat -> unit
val log2_ : ?out:mat -> mat -> unit
val log10_ : ?out:mat -> mat -> unit
val log1p_ : ?out:mat -> mat -> unit
val sin_ : ?out:mat -> mat -> unit
val cos_ : ?out:mat -> mat -> unit
val tan_ : ?out:mat -> mat -> unit
val asin_ : ?out:mat -> mat -> unit
val acos_ : ?out:mat -> mat -> unit
val atan_ : ?out:mat -> mat -> unit
val sinh_ : ?out:mat -> mat -> unit
val cosh_ : ?out:mat -> mat -> unit
val tanh_ : ?out:mat -> mat -> unit
val asinh_ : ?out:mat -> mat -> unit
val acosh_ : ?out:mat -> mat -> unit
val atanh_ : ?out:mat -> mat -> unit
val floor_ : ?out:mat -> mat -> unit
val ceil_ : ?out:mat -> mat -> unit
val round_ : ?out:mat -> mat -> unit
val trunc_ : ?out:mat -> mat -> unit
val fix_ : ?out:mat -> mat -> unit
val erf_ : ?out:mat -> mat -> unit
val erfc_ : ?out:mat -> mat -> unit
val relu_ : ?out:mat -> mat -> unit
val softplus_ : ?out:mat -> mat -> unit
val softsign_ : ?out:mat -> mat -> unit
val sigmoid_ : ?out:mat -> mat -> unit
val softmax_ : ?out:mat -> ?axis:int -> mat -> unit
val cumsum_ : ?out:mat -> ?axis:int -> mat -> unit
val cumprod_ : ?out:mat -> ?axis:int -> mat -> unit
val cummin_ : ?out:mat -> ?axis:int -> mat -> unit
val cummax_ : ?out:mat -> ?axis:int -> mat -> unit
val dropout_ : ?out:mat -> ?rate:float -> mat -> unit
val elt_equal_ : ?out:mat -> mat -> mat -> unit
val elt_not_equal_ : ?out:mat -> mat -> mat -> unit
val elt_less_ : ?out:mat -> mat -> mat -> unit
val elt_greater_ : ?out:mat -> mat -> mat -> unit
val elt_less_equal_ : ?out:mat -> mat -> mat -> unit
val elt_greater_equal_ : ?out:mat -> mat -> mat -> unit
val elt_equal_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_not_equal_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_less_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_greater_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_less_equal_scalar_ : ?out:mat -> mat -> elt -> unit
val elt_greater_equal_scalar_ : ?out:mat -> mat -> elt -> unit
\ No newline at end of file
diff --git a/owl/Owl_dense_matrix_intf/module-type-Complex/index.html b/owl/Owl_dense_matrix_intf/module-type-Complex/index.html
deleted file mode 100644
index cffec2ee9..000000000
--- a/owl/Owl_dense_matrix_intf/module-type-Complex/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Complex (owl.Owl_dense_matrix_intf.Complex)

Module type Owl_dense_matrix_intf.Complex

type mat
type cast_mat
Specific complex functions
val complex : cast_mat -> cast_mat -> mat
val polar : cast_mat -> cast_mat -> mat
val re : mat -> cast_mat
val im : mat -> cast_mat
\ No newline at end of file
diff --git a/owl/Owl_dense_matrix_intf/module-type-Real/index.html b/owl/Owl_dense_matrix_intf/module-type-Real/index.html
deleted file mode 100644
index b6597c0ae..000000000
--- a/owl/Owl_dense_matrix_intf/module-type-Real/index.html
+++ /dev/null
@@ -1,12 +0,0 @@
Real (owl.Owl_dense_matrix_intf.Real)

Module type Owl_dense_matrix_intf.Real

type elt
type mat
Specific real functions
val i0 : mat -> mat
val i0e : mat -> mat
val i1 : mat -> mat
val i1e : mat -> mat
val iv : v:mat -> mat -> mat
val scalar_iv : v:elt -> mat -> mat
val iv_scalar : v:mat -> elt -> mat
val j0 : mat -> mat
val j1 : mat -> mat
val jv : v:mat -> mat -> mat
val scalar_jv : v:elt -> mat -> mat
val jv_scalar : v:mat -> elt -> mat
val semidef : int -> mat
val min_rows : mat -> (elt * int * int) array
val min_cols : mat -> (elt * int * int) array
val max_rows : mat -> (elt * int * int) array
val max_cols : mat -> (elt * int * int) array
val signum : mat -> mat
val erf : mat -> mat
val erfc : mat -> mat
val logistic : mat -> mat
val relu : mat -> mat
val elu : ?alpha:elt -> mat -> mat
val leaky_relu : ?alpha:elt -> mat -> mat
val softplus : mat -> mat
val softsign : mat -> mat
val softmax : ?axis:int -> mat -> mat
val sigmoid : mat -> mat
val log_sum_exp' : mat -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> mat -> mat
val max_pool : ?padding:Owl_types.padding -> mat -> int array -> int array -> mat
val avg_pool : ?padding:Owl_types.padding -> mat -> int array -> int array -> mat
val atan2 : mat -> mat -> mat
val scalar_atan2 : elt -> mat -> mat
val atan2_scalar : mat -> elt -> mat
val hypot : mat -> mat -> mat
val fmod : mat -> mat -> mat
val fmod_scalar : mat -> elt -> mat
val scalar_fmod : elt -> mat -> mat
val cross_entropy' : mat -> mat -> elt
val clip_by_l2norm : elt -> mat -> mat
val poisson : mu:elt -> int -> int -> mat
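The real-only group adds special functions and the usual neural-network activations. A minimal sketch, assuming this interface is instantiated by the real double-precision Owl.Mat:

let () =
  let open Owl.Mat in
  let x = gaussian 3 3 in
  let p = softmax ~axis:1 (relu x) in     (* each row then sums to one *)
  let s = sigmoid x in
  Printf.printf "rows normalised: %b\n"
    (approx_equal ~eps:1e-6 (sum ~axis:1 p) (ones 3 1));
  ignore s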
\ No newline at end of file
diff --git a/owl/Owl_dense_matrix_s/.dummy b/owl/Owl_dense_matrix_s/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_dense_matrix_z/.dummy b/owl/Owl_dense_matrix_z/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_dense_ndarray/.dummy b/owl/Owl_dense_ndarray/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_dense_ndarray/Any/index.html b/owl/Owl_dense_ndarray/Any/index.html
deleted file mode 100644
index 56c5db81e..000000000
--- a/owl/Owl_dense_ndarray/Any/index.html
+++ /dev/null
@@ -1,6 +0,0 @@
Any (owl.Owl_dense_ndarray.Any)

Module Owl_dense_ndarray.Any

include module type of struct include Owl_dense_ndarray_a end
type 'a arr = 'a Owl_dense_ndarray_a.arr = {
  mutable shape : int array;
  mutable stride : int array;
  mutable data : 'a array;
}
Create N-dimensional array
val create : int array -> 'a -> 'a arr
val init : int array -> (int -> 'a) -> 'a arr
val init_nd : int array -> (int array -> 'a) -> 'a arr
val sequential : ?a:float -> ?step:float -> int array -> float arr
val zeros : int array -> float arr
val ones : int array -> float arr
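Unlike the Bigarray-backed modules, this container stores boxed OCaml values of any type. A short sketch, assuming the module is reachable as Owl.Dense.Ndarray.Any:

let () =
  let module A = Owl.Dense.Ndarray.Any in
  let x = A.init [| 2; 3 |] string_of_int in   (* a 2x3 array of strings *)
  let y = A.map String.length x in             (* map may change the element type *)
  Printf.printf "numel = %d\n" (A.numel y)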
Obtain basic properties
val shape : 'a arr -> int array
val num_dims : 'a arr -> int
val nth_dim : 'a arr -> int -> int
val numel : 'a arr -> int
val same_shape : 'a arr -> 'a arr -> bool
val strides : 'a arr -> int array
val slice_size : 'a arr -> int array
val index_1d_nd : int -> int array -> int array
val index_nd_1d : int array -> int array -> int
Manipulate a N-dimensional array
val get : 'a arr -> int array -> 'a
val set : 'a arr -> int array -> 'a -> unit
val get_index : 'a arr -> int array array -> 'a array
val set_index : 'a arr -> int array array -> 'a array -> unit
val get_fancy : Owl_types.index list -> 'a arr -> 'a arr
val set_fancy : Owl_types.index list -> 'a arr -> 'a arr -> unit
val get_slice : int list list -> 'a arr -> 'a arr
val set_slice : int list list -> 'a arr -> 'a arr -> unit
val fill : 'a arr -> 'a -> unit
val copy_ : out:'a arr -> 'a arr -> unit
val copy : 'a arr -> 'a arr
val reshape : 'a arr -> int array -> 'a arr
val flatten : 'a arr -> 'a arr
val sub_left : 'a arr -> int array -> 'a arr
val squeeze : ?axis:int array -> 'a arr -> 'a arr
val expand : ?hi:bool -> 'a arr -> int -> 'a arr
val reverse : 'a arr -> 'a arr
val transpose : ?axis:int array -> 'a arr -> 'a arr
val swap : int -> int -> 'a arr -> 'a arr
val repeat : 'a arr -> int array -> 'a arr
val tile : 'a arr -> int array -> 'a arr
val concatenate : ?axis:int -> 'a arr array -> 'a arr
val pad : 'a -> int list list -> 'a arr -> 'a arr
Iterate array elements
val iter : ('a -> unit) -> 'a arr -> unit
val iteri : (int -> 'a -> unit) -> 'a arr -> unit
val map : ('a -> 'b) -> 'a arr -> 'b arr
val mapi : (int -> 'a -> 'b) -> 'a arr -> 'b arr
val filter : ('a -> bool) -> 'a arr -> int array
val filteri : (int -> 'a -> bool) -> 'a arr -> int array
val fold : ('a -> 'b -> 'a) -> 'a -> 'b arr -> 'a
val foldi : (int -> 'a -> 'b -> 'a) -> 'a -> 'b arr -> 'a
val iter2 : ('a -> 'b -> unit) -> 'a arr -> 'b arr -> unit
val iter2i : (int -> 'a -> 'b -> unit) -> 'a arr -> 'b arr -> unit
val map2 : ('a -> 'b -> 'c) -> 'a arr -> 'b arr -> 'c arr
val map2i : (int -> 'a -> 'b -> 'c) -> 'a arr -> 'b arr -> 'c arr
Examine array elements or compare two arrays
val exists : ('a -> bool) -> 'a arr -> bool
val not_exists : ('a -> bool) -> 'a arr -> bool
val for_all : ('a -> bool) -> 'a arr -> bool
val is_equal : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a arr -> bool
val not_equal : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a arr -> bool
val greater : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a arr -> bool
val less : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a arr -> bool
val greater_equal : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a arr -> bool
val less_equal : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a arr -> bool
val elt_equal : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a arr -> bool arr
val elt_not_equal : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a arr -> bool arr
val elt_greater : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a arr -> bool arr
val elt_less : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a arr -> bool arr
val elt_greater_equal : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a arr -> bool arr
val elt_less_equal : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a arr -> bool arr
val elt_equal_scalar : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a -> bool arr
val elt_not_equal_scalar : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a -> bool arr
val elt_greater_scalar : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a -> bool arr
val elt_less_scalar : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a -> bool arr
val elt_greater_equal_scalar : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a -> bool arr
val elt_less_equal_scalar : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a -> bool arr
val sort : ?cmp:('a -> 'a -> int) -> 'a arr -> unit
val min : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a
val max : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a
val min_i : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a * int
val max_i : ?cmp:('a -> 'a -> int) -> 'a arr -> 'a * int
Input/Output functions
val of_array : 'a array -> int array -> 'a arr
val to_array : 'a arr -> 'a array
\ No newline at end of file
diff --git a/owl/Owl_dense_ndarray/C/index.html b/owl/Owl_dense_ndarray/C/index.html
deleted file mode 100644
index 17ddd063d..000000000
--- a/owl/Owl_dense_ndarray/C/index.html
+++ /dev/null
@@ -1,582 +0,0 @@
C (owl.Owl_dense_ndarray.C)

Module Owl_dense_ndarray.C

include module type of struct include Owl_dense_ndarray_c end
type elt = Stdlib.Complex.t
type arr = (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
type cast_arr = (float, Stdlib.Bigarray.float32_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
include Owl_dense_ndarray_intf.Common with type elt := elt and type arr := arr
include Owl_base_dense_ndarray_intf.Common with type elt := elt with type arr := arr
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:float -> int array -> arr
val shape : arr -> int array
val numel : arr -> int
val strides : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val slice_size : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val flatten : arr -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val squeeze : ?axis:int array -> arr -> arr
val expand : ?hi:bool -> arr -> int -> arr
val split : ?axis:int -> int array -> arr -> arr array
val draw : ?axis:int -> arr -> int -> arr * int array
val pad : ?v:elt -> int list list -> arr -> arr
val one_hot : int -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
Iterate array elements
val iteri : (int -> elt -> unit) -> arr -> unit
val iter : (elt -> unit) -> arr -> unit
val mapi : (int -> elt -> elt) -> arr -> arr
val map : (elt -> elt) -> arr -> arr
val filteri : (int -> elt -> bool) -> arr -> int array
val filter : (elt -> bool) -> arr -> int array
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
Examination & Comparison
val exists : (elt -> bool) -> arr -> bool
val not_exists : (elt -> bool) -> arr -> bool
val for_all : (elt -> bool) -> arr -> bool
val is_zero : arr -> bool
val is_positive : arr -> bool
val is_negative : arr -> bool
val is_nonpositive : arr -> bool
val is_nonnegative : arr -> bool
val is_normal : arr -> bool
val not_nan : arr -> bool
val not_inf : arr -> bool
val equal : arr -> arr -> bool
val not_equal : arr -> arr -> bool
val greater : arr -> arr -> bool
val less : arr -> arr -> bool
val greater_equal : arr -> arr -> bool
val less_equal : arr -> arr -> bool
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val equal_scalar : arr -> elt -> bool
val not_equal_scalar : arr -> elt -> bool
val less_scalar : arr -> elt -> bool
val greater_scalar : arr -> elt -> bool
val less_equal_scalar : arr -> elt -> bool
val greater_equal_scalar : arr -> elt -> bool
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val diag : ?k:int -> arr -> arr
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
Create N-dimensional array
val linspace : elt -> elt -> int -> arr

linspace 0. 9. 10 ...

val logspace : ?base:float -> elt -> elt -> int -> arr

logspace 0. 9. 10 ...

val unit_basis : int -> int -> arr

unit_basis n i returns a unit basis vector of length n with the i-th element set to 1.

Obtain basic properties
val num_dims : arr -> int
val nth_dim : arr -> int -> int
val nnz : arr -> int
val density : arr -> float
val size_in_bytes : arr -> int
val same_shape : arr -> arr -> bool
val same_data : arr -> arr -> bool
val ind : arr -> int -> int array
val i1d : arr -> int array -> int
Manipulate a N-dimensional array
val get_index : arr -> int array array -> elt array
val set_index : arr -> int array array -> elt array -> unit
val get_fancy : Owl_types.index list -> arr -> arr
val set_fancy : Owl_types.index list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val sub_ndarray : int array -> arr -> arr array
val slice_left : arr -> int array -> arr
val fill : arr -> elt -> unit
val resize : ?head:bool -> arr -> int array -> arr
val flip : ?axis:int -> arr -> arr
val rotate : arr -> int -> arr
val swap : int -> int -> arr -> arr
val concat_vertical : arr -> arr -> arr
val concat_horizontal : arr -> arr -> arr
val concat_vh : arr array array -> arr
val split_vh : (int * int) array array -> arr -> arr array array
val dropout : ?rate:float -> arr -> arr
val top : arr -> int -> int array array
val bottom : arr -> int -> int array array
val sort : arr -> arr
val sort1 : ?axis:int -> arr -> arr
val argsort : arr -> (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
val mmap : Unix.file_descr -> ?pos:int64 -> bool -> int array -> arr
Iterate array elements
val iter2i : (int -> elt -> elt -> unit) -> arr -> arr -> unit
val iter2 : (elt -> elt -> unit) -> arr -> arr -> unit
val map2i : (int -> elt -> elt -> elt) -> arr -> arr -> arr
val map2 : (elt -> elt -> elt) -> arr -> arr -> arr
val iteri_nd : (int array -> elt -> unit) -> arr -> unit
val mapi_nd : (int array -> elt -> elt) -> arr -> arr
val foldi_nd : ?axis:int -> (int array -> elt -> elt -> elt) -> elt -> arr -> arr
val scani_nd : ?axis:int -> (int array -> elt -> elt -> elt) -> arr -> arr
val filteri_nd : (int array -> elt -> bool) -> arr -> int array array
val iter2i_nd : (int array -> elt -> elt -> unit) -> arr -> arr -> unit
val map2i_nd : (int array -> elt -> elt -> elt) -> arr -> arr -> arr
val iteri_slice : ?axis:int -> (int -> arr -> unit) -> arr -> unit
val iter_slice : ?axis:int -> (arr -> unit) -> arr -> unit
val mapi_slice : ?axis:int -> (int -> arr -> 'c) -> arr -> 'c array
val map_slice : ?axis:int -> (arr -> 'c) -> arr -> 'c array
val filteri_slice : ?axis:int -> (int -> arr -> bool) -> arr -> arr array
val filter_slice : ?axis:int -> (arr -> bool) -> arr -> arr array
val foldi_slice : ?axis:int -> (int -> 'c -> arr -> 'c) -> 'c -> arr -> 'c
val fold_slice : ?axis:int -> ('c -> arr -> 'c) -> 'c -> arr -> 'c
Examine array elements or compare two arrays
val approx_equal : ?eps:float -> arr -> arr -> bool
val approx_equal_scalar : ?eps:float -> arr -> elt -> bool
val approx_elt_equal : ?eps:float -> arr -> arr -> arr
val approx_elt_equal_scalar : ?eps:float -> arr -> elt -> arr
Input/Output functions
val to_array : arr -> elt array
val save : out:string -> arr -> unit
val load : string -> arr
val save_npy : out:string -> arr -> unit
val load_npy : string -> arr
Unary mathematical operations
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod' : arr -> elt
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean' : arr -> elt
val median' : arr -> elt
val median : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var' : arr -> elt
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std' : arr -> elt
val sem : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sem' : arr -> elt
val minmax : ?axis:int -> ?keep_dims:bool -> arr -> arr * arr
val minmax' : arr -> elt * elt
val min_i : arr -> elt * int array
val max_i : arr -> elt * int array
val minmax_i : arr -> (elt * int array) * (elt * int array)
val abs2 : arr -> arr
val conj : arr -> arr
val reci : arr -> arr
val reci_tol : ?tol:elt -> arr -> arr
val cbrt : arr -> arr
val exp2 : arr -> arr
val exp10 : arr -> arr
val expm1 : arr -> arr
val log1p : arr -> arr
val trunc : arr -> arr
val fix : arr -> arr
val modf : arr -> arr * arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm' : arr -> elt
val l2norm_sqr : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm_sqr' : arr -> elt
val vecnorm : ?axis:int -> ?p:float -> ?keep_dims:bool -> arr -> arr
val vecnorm' : ?p:float -> arr -> elt
val cumsum : ?axis:int -> arr -> arr
val cumprod : ?axis:int -> arr -> arr
val cummin : ?axis:int -> arr -> arr
val cummax : ?axis:int -> arr -> arr
val diff : ?axis:int -> ?n:int -> arr -> arr
val lgamma : arr -> arr
Binary mathematical operations
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val ssqr' : arr -> elt -> elt
val ssqr_diff' : arr -> arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
Tensor Calculus
val contract1 : (int * int) array -> arr -> arr
val contract2 : (int * int) array -> arr -> arr -> arr
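contract1 and contract2 sum over paired axes (a generalised tensor dot). A sketch using the real ndarray module Owl.Arr, which exposes the same functions; the choice of module and the reading of the axis pairing are assumptions here:

let () =
  let a = Owl.Arr.uniform [| 2; 3 |] in
  let b = Owl.Arr.uniform [| 3; 4 |] in
  (* contract axis 1 of a with axis 0 of b: an ordinary matrix product *)
  let c = Owl.Arr.contract2 [| (1, 0) |] a b in
  Array.iter (Printf.printf "%d ") (Owl.Arr.shape c)   (* expected: 2 4 *)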
Experimental functions
val sum_slices : ?axis:int -> arr -> arr
val slide : ?axis:int -> ?ofs:int -> ?step:int -> window:int -> arr -> arr
Functions of in-place modification
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:float -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val sort_ : arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_fancy_ : out:arr -> Owl_types.index list -> arr -> unit
val set_fancy_ : out:arr -> Owl_types.index list -> arr -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit

Matrix functions

val col : arr -> int -> arr
val cols : arr -> int array -> arr
val dot : arr -> arr -> arr
val trace : arr -> elt
val to_arrays : arr -> elt array array
val draw_rows : ?replacement:bool -> arr -> int -> arr * int array
val draw_cols : ?replacement:bool -> arr -> int -> arr * int array
val draw_rows2 : ?replacement:bool -> arr -> arr -> int -> arr * arr * int array
val draw_cols2 : ?replacement:bool -> arr -> arr -> int -> arr * arr * int array
include Owl_dense_ndarray_intf.NN with type arr := arr
include Owl_base_dense_ndarray_intf.NN with type arr := arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val max_pool2d_argmax : ?padding:Owl_types.padding -> arr -> int array -> int array -> arr * (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
val conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
include Owl_dense_ndarray_intf.Complex with type elt := elt and type arr := arr and type cast_arr := cast_arr
Complex operations
val complex : cast_arr -> cast_arr -> arr

complex re im constructs a complex ndarray/matrix from re and im, which hold the real and imaginary parts of the result respectively.

Note that both re and im can themselves be complex, but they must have the same type. In that case the real part of re becomes the real part of the result, and the imaginary part of im becomes its imaginary part.
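
As an illustration, here is a minimal sketch; it assumes this page documents the single-precision complex module Owl_dense_ndarray.C, whose cast_arr is the float32 ndarray from Owl_dense_ndarray.S:

let re = Owl_dense_ndarray.S.ones [|2; 3|]    (* real parts, all 1.0 *)
let im = Owl_dense_ndarray.S.zeros [|2; 3|]   (* imaginary parts, all 0.0 *)
let z = Owl_dense_ndarray.C.complex re im     (* 2x3 complex ndarray filled with 1+0i *)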

val polar : cast_arr -> cast_arr -> arr

polar rho theta constructs a complex ndarray/matrix from the polar coordinates rho and theta. rho contains the magnitudes and theta contains the phase angles. Note that the behaviour is undefined if rho has negative elements or theta has infinite elements.
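
A matching sketch for polar, under the same assumption about which module this page documents:

let rho = Owl_dense_ndarray.S.create [|4|] 2.0      (* magnitudes *)
let theta = Owl_dense_ndarray.S.linspace 0. 1.5 4   (* phase angles in radians *)
let z = Owl_dense_ndarray.C.polar rho theta         (* element i is 2 * (cos theta_i + i sin theta_i) *)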

val re : arr -> cast_arr
val im : arr -> cast_arr
val sum' : arr -> elt
include module type of struct include Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (-$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (*$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (/$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($+) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($-) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($*) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($/) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (!=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<>) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (>) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (>=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
include sig ... end
val (=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (!=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<>$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (>$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (>=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (!=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<>.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (>.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (>=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=~) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (=~$) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (=~.) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=~.$) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (%) : (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val (%$) : (float, 'a) Owl_dense_ndarray_generic.t -> float -> (float, 'a) Owl_dense_ndarray_generic.t
val (**) : (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val ($**) : float -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val (**$) : (float, 'a) Owl_dense_ndarray_generic.t -> float -> (float, 'a) Owl_dense_ndarray_generic.t
val (+=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (-=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (*=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (/=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (+$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (-$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (*$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (/$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (.!{;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.!{;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (.${}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.${;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list array -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.${}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (.${;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list array -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
include sig ... end
val (.%{}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int -> 'a
val (.%{;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int array -> 'a -> unit
val mpow : Owl_linalg_c.mat -> float -> Owl_linalg_c.mat
D (owl.Owl_dense_ndarray.D)

Module Owl_dense_ndarray.D

include module type of struct include Owl_dense_ndarray_d end
type elt = float
type arr = (float, Stdlib.Bigarray.float64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
include Owl_dense_ndarray_intf.Common with type elt := elt and type arr := arr
include Owl_base_dense_ndarray_intf.Common with type elt := elt with type arr := arr
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:float -> int array -> arr
val shape : arr -> int array
val numel : arr -> int
val strides : arr -> int array

Refer to Owl_dense_ndarray_generic

val slice_size : arr -> int array

Refer to Owl_dense_ndarray_generic

val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val flatten : arr -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val squeeze : ?axis:int array -> arr -> arr
val expand : ?hi:bool -> arr -> int -> arr
val split : ?axis:int -> int array -> arr -> arr array
val draw : ?axis:int -> arr -> int -> arr * int array
val pad : ?v:elt -> int list list -> arr -> arr
val one_hot : int -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
Iterate array elements
val iteri : (int -> elt -> unit) -> arr -> unit
val iter : (elt -> unit) -> arr -> unit
val mapi : (int -> elt -> elt) -> arr -> arr
val map : (elt -> elt) -> arr -> arr
val filteri : (int -> elt -> bool) -> arr -> int array
val filter : (elt -> bool) -> arr -> int array
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
Examination & Comparison
val exists : (elt -> bool) -> arr -> bool
val not_exists : (elt -> bool) -> arr -> bool
val for_all : (elt -> bool) -> arr -> bool
val is_zero : arr -> bool
val is_positive : arr -> bool
val is_negative : arr -> bool
val is_nonpositive : arr -> bool
val is_nonnegative : arr -> bool
val is_normal : arr -> bool
val not_nan : arr -> bool
val not_inf : arr -> bool
val equal : arr -> arr -> bool
val not_equal : arr -> arr -> bool
val greater : arr -> arr -> bool
val less : arr -> arr -> bool
val greater_equal : arr -> arr -> bool
val less_equal : arr -> arr -> bool
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val equal_scalar : arr -> elt -> bool
val not_equal_scalar : arr -> elt -> bool
val less_scalar : arr -> elt -> bool
val greater_scalar : arr -> elt -> bool
val less_equal_scalar : arr -> elt -> bool
val greater_equal_scalar : arr -> elt -> bool
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val diag : ?k:int -> arr -> arr
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
Create N-dimensional array
val linspace : elt -> elt -> int -> arr

linspace k 0. 9. 10 ...

val logspace : ?base:float -> elt -> elt -> int -> arr

logspace k 0. 9. 10 ...
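
For instance, a hedged sketch assuming this page documents the double-precision module Owl_dense_ndarray.D, where no kind argument is needed:

let xs = Owl_dense_ndarray.D.linspace 0. 9. 10            (* 10 evenly spaced values from 0. to 9. *)
let ys = Owl_dense_ndarray.D.logspace ~base:10. 0. 3. 4   (* 4 values spaced logarithmically: 1., 10., 100., 1000. *)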

val unit_basis : int -> int -> arr

unit_basis k n i returns a unit basis vector of length n with the i-th element set to 1.
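
For example, a sketch assuming the same D module and 0-based indexing:

let e2 = Owl_dense_ndarray.D.unit_basis 5 2   (* length-5 vector [|0.; 0.; 1.; 0.; 0.|] *)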

Obtain basic properties
val num_dims : arr -> int
val nth_dim : arr -> int -> int
val nnz : arr -> int
val density : arr -> float
val size_in_bytes : arr -> int
val same_shape : arr -> arr -> bool
val same_data : arr -> arr -> bool
val ind : arr -> int -> int array
val i1d : arr -> int array -> int
Manipulate an N-dimensional array
val get_index : arr -> int array array -> elt array
val set_index : arr -> int array array -> elt array -> unit
val get_fancy : Owl_types.index list -> arr -> arr
val set_fancy : Owl_types.index list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val sub_ndarray : int array -> arr -> arr array
val slice_left : arr -> int array -> arr
val fill : arr -> elt -> unit
val resize : ?head:bool -> arr -> int array -> arr
val flip : ?axis:int -> arr -> arr
val rotate : arr -> int -> arr
val swap : int -> int -> arr -> arr
val concat_vertical : arr -> arr -> arr
val concat_horizontal : arr -> arr -> arr
val concat_vh : arr array array -> arr
val split_vh : (int * int) array array -> arr -> arr array array
val dropout : ?rate:float -> arr -> arr
val top : arr -> int -> int array array
val bottom : arr -> int -> int array array
val sort : arr -> arr
val sort1 : ?axis:int -> arr -> arr
val argsort : arr -> (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
val mmap : Unix.file_descr -> ?pos:int64 -> bool -> int array -> arr
Iterate array elements
val iter2i : (int -> elt -> elt -> unit) -> arr -> arr -> unit
val iter2 : (elt -> elt -> unit) -> arr -> arr -> unit
val map2i : (int -> elt -> elt -> elt) -> arr -> arr -> arr
val map2 : (elt -> elt -> elt) -> arr -> arr -> arr
val iteri_nd : (int array -> elt -> unit) -> arr -> unit
val mapi_nd : (int array -> elt -> elt) -> arr -> arr
val foldi_nd : ?axis:int -> (int array -> elt -> elt -> elt) -> elt -> arr -> arr
val scani_nd : ?axis:int -> (int array -> elt -> elt -> elt) -> arr -> arr
val filteri_nd : (int array -> elt -> bool) -> arr -> int array array
val iter2i_nd : (int array -> elt -> elt -> unit) -> arr -> arr -> unit
val map2i_nd : (int array -> elt -> elt -> elt) -> arr -> arr -> arr
val iteri_slice : ?axis:int -> (int -> arr -> unit) -> arr -> unit
val iter_slice : ?axis:int -> (arr -> unit) -> arr -> unit
val mapi_slice : ?axis:int -> (int -> arr -> 'c) -> arr -> 'c array
val map_slice : ?axis:int -> (arr -> 'c) -> arr -> 'c array
val filteri_slice : ?axis:int -> (int -> arr -> bool) -> arr -> arr array
val filter_slice : ?axis:int -> (arr -> bool) -> arr -> arr array
val foldi_slice : ?axis:int -> (int -> 'c -> arr -> 'c) -> 'c -> arr -> 'c
val fold_slice : ?axis:int -> ('c -> arr -> 'c) -> 'c -> arr -> 'c
Examine array elements or compare two arrays
Input/Output functions
val to_array : arr -> elt array
val save : out:string -> arr -> unit
val load : string -> arr
val save_npy : out:string -> arr -> unit
val load_npy : string -> arr
Unary mathematical operations
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod' : arr -> elt
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean' : arr -> elt
val median' : arr -> elt
val median : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var' : arr -> elt
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std' : arr -> elt
val sem : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sem' : arr -> elt
val minmax : ?axis:int -> ?keep_dims:bool -> arr -> arr * arr
val minmax' : arr -> elt * elt
val min_i : arr -> elt * int array
val max_i : arr -> elt * int array
val minmax_i : arr -> (elt * int array) * (elt * int array)
val abs2 : arr -> arr
val conj : arr -> arr
val reci : arr -> arr
val reci_tol : ?tol:elt -> arr -> arr
val cbrt : arr -> arr
val exp2 : arr -> arr
val exp10 : arr -> arr
val expm1 : arr -> arr
val log1p : arr -> arr
val trunc : arr -> arr
val fix : arr -> arr
val modf : arr -> arr * arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm_sqr : ?axis:int -> ?keep_dims:bool -> arr -> arr
val vecnorm : ?axis:int -> ?p:float -> ?keep_dims:bool -> arr -> arr
val vecnorm' : ?p:float -> arr -> elt
val cumsum : ?axis:int -> arr -> arr
val cumprod : ?axis:int -> arr -> arr
val cummin : ?axis:int -> arr -> arr
val cummax : ?axis:int -> arr -> arr
val diff : ?axis:int -> ?n:int -> arr -> arr
val lgamma : arr -> arr
Binary mathematical operations
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val ssqr' : arr -> elt -> elt
val ssqr_diff' : arr -> arr -> elt
Tensor Calculus
val contract1 : (int * int) array -> arr -> arr
val contract2 : (int * int) array -> arr -> arr -> arr
Experimental functions
val slide : ?axis:int -> ?ofs:int -> ?step:int -> window:int -> arr -> arr
Functions of in-place modification
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:float -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val sort_ : arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_fancy_ : out:arr -> Owl_types.index list -> arr -> unit
val set_fancy_ : out:arr -> Owl_types.index list -> arr -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit

Matrix functions

val col : arr -> int -> arr
val cols : arr -> int array -> arr
val to_arrays : arr -> elt array array
val draw_rows : ?replacement:bool -> arr -> int -> arr * int array
val draw_cols : ?replacement:bool -> arr -> int -> arr * int array
val draw_rows2 : ?replacement:bool -> arr -> arr -> int -> arr * arr * int array
val draw_cols2 : ?replacement:bool -> arr -> arr -> int -> arr * arr * int array
include Owl_dense_ndarray_intf.Real with type elt := elt and type arr := arr
include Owl_base_dense_ndarray_intf.Real with type elt := elt with type arr := arr
val sum_slices : ?axis:int -> arr -> arr
val signum : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val atan2 : arr -> arr -> arr
val approx_equal : ?eps:float -> arr -> arr -> bool
val approx_equal_scalar : ?eps:float -> arr -> float -> bool
val approx_elt_equal : ?eps:float -> arr -> arr -> arr
val approx_elt_equal_scalar : ?eps:float -> arr -> float -> arr
val dot : arr -> arr -> arr
val trace : arr -> elt
Helper functions
val float_to_elt : float -> elt
val elt_to_float : elt -> float
Real operations
val i0 : arr -> arr
val i0e : arr -> arr
val i1 : arr -> arr
val i1e : arr -> arr
val iv : v:arr -> arr -> arr
val scalar_iv : v:elt -> arr -> arr
val iv_scalar : v:arr -> elt -> arr
val j0 : arr -> arr
val j1 : arr -> arr
val jv : v:arr -> arr -> arr
val scalar_jv : v:elt -> arr -> arr
val jv_scalar : v:arr -> elt -> arr
val erf : arr -> arr
val erfc : arr -> arr
val logistic : arr -> arr
val elu : ?alpha:elt -> arr -> arr
val leaky_relu : ?alpha:elt -> arr -> arr
val softplus : arr -> arr
val softsign : arr -> arr
val softmax : ?axis:int -> arr -> arr
val sigmoid : arr -> arr
val log_sum_exp' : arr -> float
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val fmod_scalar : arr -> elt -> arr
val scalar_fmod : elt -> arr -> arr
val cross_entropy' : arr -> arr -> float
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
val poisson : mu:elt -> int array -> arr
val poisson_ : mu:elt -> out:arr -> unit
include Owl_dense_ndarray_intf.NN with type arr := arr
include Owl_base_dense_ndarray_intf.NN with type arr := arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val max_pool2d_argmax : ?padding:Owl_types.padding -> arr -> int array -> int array -> arr * (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
val conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
include Owl_dense_ndarray_intf.Distribution with type arr := arr
Stats & distribution functions
val uniform_rvs : a:arr -> b:arr -> n:int -> arr
val uniform_pdf : a:arr -> b:arr -> arr -> arr
val uniform_logpdf : a:arr -> b:arr -> arr -> arr
val uniform_cdf : a:arr -> b:arr -> arr -> arr
val uniform_logcdf : a:arr -> b:arr -> arr -> arr
val uniform_ppf : a:arr -> b:arr -> arr -> arr
val uniform_sf : a:arr -> b:arr -> arr -> arr
val uniform_logsf : a:arr -> b:arr -> arr -> arr
val uniform_isf : a:arr -> b:arr -> arr -> arr
val gaussian_rvs : mu:arr -> sigma:arr -> n:int -> arr
val gaussian_pdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logpdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_cdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logcdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_ppf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_sf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logsf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_isf : mu:arr -> sigma:arr -> arr -> arr
val exponential_rvs : lambda:arr -> n:int -> arr
val exponential_pdf : lambda:arr -> arr -> arr
val exponential_logpdf : lambda:arr -> arr -> arr
val exponential_cdf : lambda:arr -> arr -> arr
val exponential_logcdf : lambda:arr -> arr -> arr
val exponential_ppf : lambda:arr -> arr -> arr
val exponential_sf : lambda:arr -> arr -> arr
val exponential_logsf : lambda:arr -> arr -> arr
val exponential_isf : lambda:arr -> arr -> arr
val gamma_rvs : shape:arr -> scale:arr -> n:int -> arr
val gamma_pdf : shape:arr -> scale:arr -> arr -> arr
val gamma_logpdf : shape:arr -> scale:arr -> arr -> arr
val gamma_cdf : shape:arr -> scale:arr -> arr -> arr
val gamma_logcdf : shape:arr -> scale:arr -> arr -> arr
val gamma_ppf : shape:arr -> scale:arr -> arr -> arr
val gamma_sf : shape:arr -> scale:arr -> arr -> arr
val gamma_logsf : shape:arr -> scale:arr -> arr -> arr
val gamma_isf : shape:arr -> scale:arr -> arr -> arr
val beta_rvs : a:arr -> b:arr -> n:int -> arr
val beta_pdf : a:arr -> b:arr -> arr -> arr
val beta_logpdf : a:arr -> b:arr -> arr -> arr
val beta_cdf : a:arr -> b:arr -> arr -> arr
val beta_logcdf : a:arr -> b:arr -> arr -> arr
val beta_ppf : a:arr -> b:arr -> arr -> arr
val beta_sf : a:arr -> b:arr -> arr -> arr
val beta_logsf : a:arr -> b:arr -> arr -> arr
val beta_isf : a:arr -> b:arr -> arr -> arr
val chi2_rvs : df:arr -> n:int -> arr
val chi2_pdf : df:arr -> arr -> arr
val chi2_logpdf : df:arr -> arr -> arr
val chi2_cdf : df:arr -> arr -> arr
val chi2_logcdf : df:arr -> arr -> arr
val chi2_ppf : df:arr -> arr -> arr
val chi2_sf : df:arr -> arr -> arr
val chi2_logsf : df:arr -> arr -> arr
val chi2_isf : df:arr -> arr -> arr
val f_rvs : dfnum:arr -> dfden:arr -> n:int -> arr
val f_pdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logpdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_cdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logcdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_ppf : dfnum:arr -> dfden:arr -> arr -> arr
val f_sf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logsf : dfnum:arr -> dfden:arr -> arr -> arr
val f_isf : dfnum:arr -> dfden:arr -> arr -> arr
val cauchy_rvs : loc:arr -> scale:arr -> n:int -> arr
val cauchy_pdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logpdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_cdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logcdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_ppf : loc:arr -> scale:arr -> arr -> arr
val cauchy_sf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logsf : loc:arr -> scale:arr -> arr -> arr
val cauchy_isf : loc:arr -> scale:arr -> arr -> arr
val lomax_rvs : shape:arr -> scale:arr -> n:int -> arr
val lomax_pdf : shape:arr -> scale:arr -> arr -> arr
val lomax_logpdf : shape:arr -> scale:arr -> arr -> arr
val lomax_cdf : shape:arr -> scale:arr -> arr -> arr
val lomax_logcdf : shape:arr -> scale:arr -> arr -> arr
val lomax_ppf : shape:arr -> scale:arr -> arr -> arr
val lomax_sf : shape:arr -> scale:arr -> arr -> arr
val lomax_logsf : shape:arr -> scale:arr -> arr -> arr
val lomax_isf : shape:arr -> scale:arr -> arr -> arr
val weibull_rvs : shape:arr -> scale:arr -> n:int -> arr
val weibull_pdf : shape:arr -> scale:arr -> arr -> arr
val weibull_logpdf : shape:arr -> scale:arr -> arr -> arr
val weibull_cdf : shape:arr -> scale:arr -> arr -> arr
val weibull_logcdf : shape:arr -> scale:arr -> arr -> arr
val weibull_ppf : shape:arr -> scale:arr -> arr -> arr
val weibull_sf : shape:arr -> scale:arr -> arr -> arr
val weibull_logsf : shape:arr -> scale:arr -> arr -> arr
val weibull_isf : shape:arr -> scale:arr -> arr -> arr
val laplace_rvs : loc:arr -> scale:arr -> n:int -> arr
val laplace_pdf : loc:arr -> scale:arr -> arr -> arr
val laplace_logpdf : loc:arr -> scale:arr -> arr -> arr
val laplace_cdf : loc:arr -> scale:arr -> arr -> arr
val laplace_logcdf : loc:arr -> scale:arr -> arr -> arr
val laplace_ppf : loc:arr -> scale:arr -> arr -> arr
val laplace_sf : loc:arr -> scale:arr -> arr -> arr
val laplace_logsf : loc:arr -> scale:arr -> arr -> arr
val laplace_isf : loc:arr -> scale:arr -> arr -> arr
val gumbel1_rvs : a:arr -> b:arr -> n:int -> arr
val gumbel1_pdf : a:arr -> b:arr -> arr -> arr
val gumbel1_logpdf : a:arr -> b:arr -> arr -> arr
val gumbel1_cdf : a:arr -> b:arr -> arr -> arr
val gumbel1_logcdf : a:arr -> b:arr -> arr -> arr
val gumbel1_ppf : a:arr -> b:arr -> arr -> arr
val gumbel1_sf : a:arr -> b:arr -> arr -> arr
val gumbel1_logsf : a:arr -> b:arr -> arr -> arr
val gumbel1_isf : a:arr -> b:arr -> arr -> arr
val gumbel2_rvs : a:arr -> b:arr -> n:int -> arr
val gumbel2_pdf : a:arr -> b:arr -> arr -> arr
val gumbel2_logpdf : a:arr -> b:arr -> arr -> arr
val gumbel2_cdf : a:arr -> b:arr -> arr -> arr
val gumbel2_logcdf : a:arr -> b:arr -> arr -> arr
val gumbel2_ppf : a:arr -> b:arr -> arr -> arr
val gumbel2_sf : a:arr -> b:arr -> arr -> arr
val gumbel2_logsf : a:arr -> b:arr -> arr -> arr
val gumbel2_isf : a:arr -> b:arr -> arr -> arr
val logistic_rvs : loc:arr -> scale:arr -> n:int -> arr
val logistic_pdf : loc:arr -> scale:arr -> arr -> arr
val logistic_logpdf : loc:arr -> scale:arr -> arr -> arr
val logistic_cdf : loc:arr -> scale:arr -> arr -> arr
val logistic_logcdf : loc:arr -> scale:arr -> arr -> arr
val logistic_ppf : loc:arr -> scale:arr -> arr -> arr
val logistic_sf : loc:arr -> scale:arr -> arr -> arr
val logistic_logsf : loc:arr -> scale:arr -> arr -> arr
val logistic_isf : loc:arr -> scale:arr -> arr -> arr
val lognormal_rvs : mu:arr -> sigma:arr -> n:int -> arr
val lognormal_pdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logpdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_cdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logcdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_ppf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_sf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logsf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_isf : mu:arr -> sigma:arr -> arr -> arr
val rayleigh_rvs : sigma:arr -> n:int -> arr
val rayleigh_pdf : sigma:arr -> arr -> arr
val rayleigh_logpdf : sigma:arr -> arr -> arr
val rayleigh_cdf : sigma:arr -> arr -> arr
val rayleigh_logcdf : sigma:arr -> arr -> arr
val rayleigh_ppf : sigma:arr -> arr -> arr
val rayleigh_sf : sigma:arr -> arr -> arr
val rayleigh_logsf : sigma:arr -> arr -> arr
val rayleigh_isf : sigma:arr -> arr -> arr
include module type of struct include Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (-$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (*$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (/$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($+) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($-) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($*) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($/) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (!=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<>) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (>) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (>=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
include sig ... end
val (=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (!=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<>$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (>$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (>=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (!=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<>.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (>.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (>=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=~) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (=~$) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (=~.) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=~.$) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (%) : (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val (%$) : (float, 'a) Owl_dense_ndarray_generic.t -> float -> (float, 'a) Owl_dense_ndarray_generic.t
val (**) : (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val ($**) : float -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val (**$) : (float, 'a) Owl_dense_ndarray_generic.t -> float -> (float, 'a) Owl_dense_ndarray_generic.t
val (+=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (-=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (*=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (/=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (+$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (-$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (*$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (/$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (.!{;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.!{;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (.${}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.${;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list array -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.${}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (.${;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list array -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
include sig ... end
val (.%{}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int -> 'a
val (.%{;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int array -> 'a -> unit
val mpow : Owl_linalg_d.mat -> float -> Owl_linalg_d.mat
Generic (owl.Owl_dense_ndarray.Generic)

Module Owl_dense_ndarray.Generic

include module type of struct include Owl_dense_ndarray_generic end

For the comparison of two complex numbers x and y, Owl uses the following conventions: 1) x and y are equal iff both their real and imaginary parts are equal; 2) x is less than y if the magnitude of x is less than the magnitude of y; in case x and y have the same magnitude, x is less than y if the phase of x is less than the phase of y; 3) the less-or-equal, greater, and greater-or-equal relations are defined on top of these conventions.
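
These conventions can be written down directly; the following is only an illustrative sketch using OCaml's standard Complex module, not code taken from Owl:

let complex_equal x y = x.Complex.re = y.Complex.re && x.Complex.im = y.Complex.im
let complex_less x y =
  let mx = Complex.norm x and my = Complex.norm y in
  if mx <> my then mx < my              (* compare magnitudes first *)
  else Complex.arg x < Complex.arg y    (* break ties on the phase *)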

The generic module supports operations for the following Bigarray element types: Int8_signed, Int8_unsigned, Int16_signed, Int16_unsigned, Int32, Int64, Float32, Float64, Complex32, Complex64.

Type definition
type ('a, 'b) t = ('a, 'b, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t

N-dimensional array type, i.e. Bigarray Genarray type.

type ('a, 'b) kind = ('a, 'b) Stdlib.Bigarray.kind

Type of the ndarray, e.g. Bigarray.Float32, Bigarray.Complex64, etc.

Create Ndarrays
val empty : ('a, 'b) kind -> int array -> ('a, 'b) t

empty Bigarray.Float64 [|3;4;5|] creates a three-dimensional array of type Bigarray.Float64. Each dimension has the following size: 3, 4, and 5. The elements in the array are not initialised and can hold any value. empty is faster than zeros for creating an ndarray.

The module only supports the following four types of ndarray: Bigarray.Float32, Bigarray.Float64, Bigarray.Complex32, and Bigarray.Complex64.
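
For example, a minimal sketch (the fully qualified name Owl_dense_ndarray.Generic is used here only for clarity):

let x = Owl_dense_ndarray.Generic.empty Bigarray.Float64 [|3; 4; 5|]
(* a 3x4x5 float64 ndarray whose contents are whatever happened to be in memory *)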

val create : ('a, 'b) kind -> int array -> 'a -> ('a, 'b) t

create Bigarray.Float64 [|3;4;5|] 2. creates a three-dimensional array of type Bigarray.Float64. Each dimension has the following size: 3, 4, and 5. The elements in the array are initialised to 2.
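
For example, a small sketch:

let y = Owl_dense_ndarray.Generic.create Bigarray.Float64 [|3; 4; 5|] 2.
(* every one of the 60 elements is initialised to 2.0 *)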

val init : ('a, 'b) kind -> int array -> (int -> 'a) -> ('a, 'b) t

init Bigarray.Float64 d f creates an ndarray x of shape d, then uses f to initialise the elements in x. The input of f is the 1-dimensional index of the ndarray. You need to convert it explicitly if you need an N-dimensional index; the function ind can help you.

val init_nd : ('a, 'b) kind -> int array -> (int array -> 'a) -> ('a, 'b) t

init_nd is almost the same as init but f receives n-dimensional index as input. It is more convenient since you don't have to convert the index by yourself, but this also means init_nd is slower than init.
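The following is a minimal sketch (not part of the original documentation) contrasting the two functions. It assumes the module is aliased as M for brevity; the alias is purely illustrative.

.. code-block:: ocaml

  (* hypothetical alias used only in these examples *)
  module M = Owl_dense_ndarray.Generic

  (* init passes the 1-d (flat) index to f *)
  let a = M.init Bigarray.Float64 [|2;3|] (fun i -> float_of_int i)
  (* the element at [|1;0|] is 3. because its flat index is 3 *)

  (* init_nd passes the n-d index to f *)
  let b = M.init_nd Bigarray.Float64 [|2;3|] (fun idx -> float_of_int (idx.(0) + idx.(1)))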

val zeros : ('a, 'b) kind -> int array -> ('a, 'b) t

zeros Bigarray.Complex32 [|3;4;5|] creates a three-dimensional array of Bigarray.Complex32 type. Each dimension has the following size: 3, 4, and 5. The elements in the array are initialised to "zero". Depending on the kind, zero can be 0. or Complex.zero.

val ones : ('a, 'b) kind -> int array -> ('a, 'b) t

ones Bigarray.Complex32 [|3;4;5|] creates a three-dimensional array of Bigarray.Complex32 type. Each dimension has the following size: 3, 4, and 5. The elements in the array are initialised to "one". Depending on the kind, one can be 1. or Complex.one.

val eye : ('a, 'b) kind -> int -> ('a, 'b) t

eye m creates an m by m identity matrix.

val uniform : ('a, 'b) kind -> ?a:'a -> ?b:'a -> int array -> ('a, 'b) t

uniform Bigarray.Float64 [|3;4;5|] creates a three-dimensional array of type Bigarray.Float64. Each dimension has the following size: 3, 4, and 5. The elements in the array follow a uniform distribution between 0 and 1.

val gaussian : ('a, 'b) kind -> ?mu:'a -> ?sigma:'a -> int array -> ('a, 'b) t

gaussian Float64 [|3;4;5|] ...

val poisson : ('a, 'b) kind -> mu:float -> int array -> ('a, 'b) t

poisson Float64 [|3;4;5|] ...

val sequential : ('a, 'b) kind -> ?a:'a -> ?step:'a -> int array -> ('a, 'b) t

sequential Bigarray.Float64 [|3;4;5|] 2. creates a three-dimensional array of type Bigarray.Float64. Each dimension has the following size: 3, 4, and 5. The elements in the array are assigned sequential values.

?a specifies the starting value and the default value is zero; whilst ?step specifies the step size with default value one.
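As a small illustrative sketch (assuming the same M alias introduced above):

.. code-block:: ocaml

  (* start from 1. and increase by 0.5 at each element, in row-major order *)
  let x = M.sequential Bigarray.Float64 ~a:1. ~step:0.5 [|2;3|]
  (* elements are 1.0, 1.5, 2.0, 2.5, 3.0, 3.5 *)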

val linspace : ('a, 'b) kind -> 'a -> 'a -> int -> ('a, 'b) t

linspace k 0. 9. 10 ...

val logspace : ('a, 'b) kind -> ?base:float -> 'a -> 'a -> int -> ('a, 'b) t

logspace k 0. 9. 10 ...

val bernoulli : ('a, 'b) kind -> ?p:float -> int array -> ('a, 'b) t

bernoulli k ~p:0.3 [|2;3;4|]

val complex : ('a, 'b) kind -> ('c, 'd) kind -> ('a, 'b) t -> ('a, 'b) t -> ('c, 'd) t

complex re im constructs a complex ndarray/matrix from re and im. re and im contain the real and imaginary part of x respectively.

Note that both re and im can be complex but must have the same type. The real part of re will be the real part of x and the imaginary part of im will be the imaginary part of x.

val polar : ('a, 'b) kind -> ('c, 'd) kind -> ('a, 'b) t -> ('a, 'b) t -> ('c, 'd) t

polar rho theta constructs a complex ndarray/matrix from polar coordinates rho and theta. rho contains the magnitudes and theta contains the phase angles. Note that the behaviour is undefined if rho has negative elements or theta has infinite elements.

val unit_basis : ('a, 'b) kind -> int -> int -> ('a, 'b) t

unit_basis k n i returns a unit basis vector with ith element set to 1.

Obtain basic properties
val shape : ('a, 'b) t -> int array

shape x returns the shape of ndarray x.

val num_dims : ('a, 'b) t -> int

num_dims x returns the number of dimensions of ndarray x.

val nth_dim : ('a, 'b) t -> int -> int

nth_dim x i returns the size of the i-th dimension of x.

val numel : ('a, 'b) t -> int

numel x returns the number of elements in x.

val nnz : ('a, 'b) t -> int

nnz x returns the number of non-zero elements in x.

val density : ('a, 'b) t -> float

density x returns the percentage of non-zero elements in x.

val size_in_bytes : ('a, 'b) t -> int

size_in_bytes x returns the size of x in bytes in memory.

val same_shape : ('a, 'b) t -> ('c, 'd) t -> bool

same_shape x y checks whether x and y have the same shape or not.

val same_data : ('a, 'b) t -> ('a, 'b) t -> bool

same_data x y checks whether x and y share the same underlying data in the memory. Namely, both variables point to the same memory address. This is done by checking the Data pointer in the Bigarray structure.

This function is very useful for avoiding unnecessary copying between two ndarrays especially if one has been reshaped or sliced.

val kind : ('a, 'b) t -> ('a, 'b) kind

kind x returns the type of ndarray x. It is one of the four possible values: Bigarray.Float32, Bigarray.Float64, Bigarray.Complex32, and Bigarray.Complex64.

val strides : ('a, 'b) t -> int array

strides x calculates the strides of x. E.g., if x is of shape [|3;4;5|], the returned strides will be [|20;5;1|].

val slice_size : ('a, 'b) t -> int array

slice_size x calculates the slice size in each dimension. E.g., if x is of shape [|3;4;5|], the returned slice size will be [|60; 20; 5|].

val ind : ('a, 'b) t -> int -> int array

ind x i converts x's one-dimensional index i to n-dimensional one.

val i1d : ('a, 'b) t -> int array -> int

i1d x i converts x's n-dimensional index i to one-dimensional one.
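A short sketch of the round trip between the two index representations (assuming the M alias used earlier):

.. code-block:: ocaml

  let x = M.sequential Bigarray.Float64 [|3;4;5|] in
  let nd = M.ind x 27 in   (* [|1;1;2|] since 27 = 1*20 + 1*5 + 2 *)
  M.i1d x nd               (* back to 27 *)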

Manipulate Ndarrays
val get : ('a, 'b) t -> int array -> 'a

get x i returns the value at i in x. E.g., get x [|0;2;1|] returns the value at [|0;2;1|] in x.

val set : ('a, 'b) t -> int array -> 'a -> unit

set x i a sets the value at i to a in x.

val get_index : ('a, 'b) t -> int array array -> 'a array

get_index x i returns an array of element values specified by the indices i. The length of array i equals the number of dimensions of x. The arrays in i must have the same length, and each represents the indices in that dimension.

E.g., [| [|1;2|]; [|3;4|] |] returns the value of elements at position (1,3) and (2,4) respectively.
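A concrete sketch of the example above (assuming the M alias used earlier):

.. code-block:: ocaml

  let x = M.sequential Bigarray.Float64 [|4;5|] in
  (* pick the elements at positions (1,3) and (2,4) *)
  M.get_index x [| [|1;2|]; [|3;4|] |]
  (* = [| 8.; 14. |] because sequential fills 0., 1., 2., ... in row-major order *)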

val set_index : ('a, 'b) t -> int array array -> 'a array -> unit

set_index x i a sets the value of elements in x according to the indices specified by i. The length of array i equals the number of dimensions of x. The arrays in i must have the same length, and each represents the indices in that dimension.

If the length of a equals the number of indices specified by i, then each selected element is assigned the value at the corresponding position in a. If the length of a equals one, then all the selected elements are assigned the same value.

val get_fancy : Owl_types.index list -> ('a, 'b) t -> ('a, 'b) t

get_fancy s x returns a copy of the slice in x. The slice is defined by s, which is a list of index definitions. E.g., for an ndarray x of dimension [|2; 2; 3|], slice [0] x takes the slices of index (0,*,*), i.e., [|0;0;0|], [|0;0;1|], [|0;0;2|] ... Also note that if the length of s is less than the number of dimensions of x, the slice function will append slice definitions for the higher dimensions by assuming all the elements in the missing dimensions are taken.

Basically, the slice function offers very much the same semantics as numpy, i.e., the start:stop:step grammar, so if you know how to index and slice an ndarray in numpy, you should not find it difficult to use this function. Please just refer to the numpy documentation or my tutorial.

There are two differences between slice_left and slice: slice_left does not make a copy but simply moves the pointer; slice_left can only take a slice from the left-most axis, whereas slice is much more flexible and can work on arbitrary axes which need not start from the left-most side.

val set_fancy : Owl_types.index list -> ('a, 'b) t -> ('a, 'b) t -> unit

set_fancy axis x y sets the slice defined by axis in x according to the values in y. y must have the same shape as the one defined by axis.

About the slice definition of axis, please refer to get_fancy function.

val get_fancy_ext : Owl_types.index array -> ('a, 'b) t -> ('a, 'b) t

This function is used for the extended indexing operators available since OCaml 4.10.0. The indexing and slicing syntax becomes much lighter.

val set_fancy_ext : Owl_types.index array -> ('a, 'b) t -> ('a, 'b) t -> unit

This function is used for the extended indexing operators available since OCaml 4.10.0. The indexing and slicing syntax becomes much lighter.

val get_slice : int list list -> ('a, 'b) t -> ('a, 'b) t

get_slice axis x aims to provide a simpler version of get_fancy. This function assumes that every list element in the passed in int list list represents a range, i.e., R constructor.

E.g., [[];[0;3];[0]] is equivalent to [R []; R [0;3]; R [0]].
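For instance, a minimal sketch (assuming the M alias used earlier and an ndarray of shape [|3;4;5|]):

.. code-block:: ocaml

  let x = M.sequential Bigarray.Float64 [|3;4;5|] in
  let y = M.get_slice [ []; [0;3]; [0] ] x in
  (* y has shape [|3;4;1|]: all of axis 0, indices 0..3 of axis 1, index 0 of axis 2 *)
  M.shape y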

val set_slice : int list list -> ('a, 'b) t -> ('a, 'b) t -> unit

set_slice axis x y aims to provide a simpler version of set_fancy. This function assumes that every list element in the passed in int list list represents a range, i.e., R constructor.

E.g., [[];[0;3];[0]] is equivalent to [R []; R [0;3]; R [0]].

val get_slice_ext : int list array -> ('a, 'b) t -> ('a, 'b) t

get_slice_ext axis x is used for the extended indexing operators available since OCaml 4.10.0. The indexing and slicing syntax becomes much lighter.

E.g., x.%{0;1;2}.

val set_slice_ext : int list array -> ('a, 'b) t -> ('a, 'b) t -> unit

Similar to get_slice_ext axis x, this function is used for the extended indexing operators available since OCaml 4.10.0. The indexing and slicing syntax becomes much lighter.

val sub_left : ('a, 'b) t -> int -> int -> ('a, 'b) t

Same as Bigarray.sub_left, please refer to the Bigarray documentation.

val sub_ndarray : int array -> ('a, 'b) t -> ('a, 'b) t array

sub_ndarray parts x is similar to Bigarray.sub_left. It splits the passed-in ndarray x along axis 0 according to parts. The elements in parts do not need to be equal, but they must sum up to the dimension along axis zero.

The returned sub-ndarrays share the same memory as x. Because no copy is made, this function is much faster than using the `split` function to divide the lowest dimensionality of x.

val slice_left : ('a, 'b) t -> int array -> ('a, 'b) t

Same as Bigarray.slice_left, please refer to Bigarray documentation.

val reset : ('a, 'b) t -> unit

reset x resets all the elements in x to zero.

val fill : ('a, 'b) t -> 'a -> unit

fill x a assigns the value a to the elements in x.

val copy : ('a, 'b) t -> ('a, 'b) t

copy x makes a copy of x.

val resize : ?head:bool -> ('a, 'b) t -> int array -> ('a, 'b) t

resize ~head x d resizes the ndarray x. If there are fewer elements in the new shape than in the old one, the new ndarray shares part of the memory with the old x. head indicates the alignment between the new and old data, either from the head or from the tail. Note the data is flattened before the operation.

If there are more elements in the new shape d, then new memory space will be allocated and the content of x will be copied to the new memory. The rest of the allocated space will be filled with zeros. The default value of head is true.

val reshape : ('a, 'b) t -> int array -> ('a, 'b) t

reshape x d transforms x into a new shape defined by d. Note the reshape function will not make a copy of x; the returned ndarray shares the same memory with the original x.

One shape dimension (only one) can be set to -1. In this case, the value is inferred from the length of the array and remaining dimensions.
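A short sketch of the inferred dimension (using the M alias from earlier):

.. code-block:: ocaml

  let x = M.sequential Bigarray.Float64 [|2;3;4|] in
  let y = M.reshape x [|4;-1|] in
  (* the -1 is inferred as 6 because 2*3*4 = 24 = 4*6 *)
  M.shape y  (* [|4;6|] *)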

val flatten : ('a, 'b) t -> ('a, 'b) t

flatten x transforms x into a one-dimensional array without making a copy. Therefore the returned value shares the same memory space with the original x.

val reverse : ('a, 'b) t -> ('a, 'b) t

reverse x reverses the order of all elements in the flattened x and returns the result in a new ndarray. The original x remains intact.

val flip : ?axis:int -> ('a, 'b) t -> ('a, 'b) t

flip ~axis x flips a matrix/ndarray along axis. By default axis = 0. The result is returned in a new matrix/ndarray, so the original x remains intact.

val rotate : ('a, 'b) t -> int -> ('a, 'b) t

rotate x d rotates x clockwise by d degrees. d must be a multiple of 90, otherwise the function will fail. If x is an n-dimensional array, then the function rotates the plane formed by the first and second dimensions.

val transpose : ?axis:int array -> ('a, 'b) t -> ('a, 'b) t

transpose ~axis x makes a copy of x, then transposes it according to ~axis. ~axis must be a valid permutation of the dimension indices of x. E.g., for a three-dimensional ndarray, it can be [2;1;0], [0;2;1], [1;2;0], etc.

val swap : int -> int -> ('a, 'b) t -> ('a, 'b) t

swap i j x makes a copy of x, then swaps the data on axis i and j.

val tile : ('a, 'b) t -> int array -> ('a, 'b) t

tile x a tiles the data in x according to the repetition specified by a. This function provides exactly the same behaviour as numpy.tile; please refer to numpy's online documentation for details.

val repeat : ('a, 'b) t -> int array -> ('a, 'b) t

repeat x a repeats the elements of x according to the repetition specified by a. The i-th element of a specifies the number of times that the individual entries of the i-th dimension of x should be repeated.

val concat_vertical : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

concat_vertical x y concatenates two ndarrays x and y vertically. This is just a convenient function for concatenating two ndarrays along their lowest dimension, i.e. 0.

The associated operator is @||, please refer to :doc:`owl_operator`.

val concat_horizontal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

concat_horizontal x y concatenates two ndarrays x and y horizontally. This is just a convenient function for concatenating two ndarrays along their highest dimension.

The associated operator is @=, please refer to :doc:`owl_operator`.

val concat_vh : ('a, 'b) t array array -> ('a, 'b) t

concat_vh is used to assemble small parts of matrices into a bigger one. E.g., in [| [|a; b; c|]; [|d; e; f|]; [|g; h; i|] |], wherein `a, b, c ... i` are matrices of different shapes, they will be concatenated into a big matrix as follows.

.. math:: \begin{matrix} a & b & c \\ d & e & f \\ g & h & i \end{matrix}

This is achieved by first concatenating along axis:1 for each element in the array, then concatenating along axis:0. The number of elements in each array need not be equal as long as the aggregated dimensions match. E.g., please check the following example.

.. code-block:: ocaml

  let a00 = Mat.sequential 2 3 in
  let a01 = Mat.sequential 2 2 in
  let a02 = Mat.sequential 2 1 in
  let a10 = Mat.sequential 3 3 in
  let a11 = Mat.sequential 3 3 in
  Mat.concat_vh [| [|a00; a01; a02|]; [|a10; a11|] |];;

val concatenate : ?axis:int -> ('a, 'b) t array -> ('a, 'b) t

concatenate ~axis:2 x concatenates an array of ndarrays along the third dimension. For the ndarrays in x, they must have the same shape except the dimension specified by axis. The default value of axis is 0, i.e., the lowest dimension of a matrix/ndarray.

val stack : ?axis:int -> ('a, 'b) t array -> ('a, 'b) t

stack ~axis x stacks an array of ndarrays along the axis dimension. For example, if x contains K ndarrays of shape [|2;3|], then stack ~axis:1 x will return an ndarray of dimensions [|2;K;3|]. The ndarrays in x must all have the same shape. The default value of axis is 0.

val split : ?axis:int -> int array -> ('a, 'b) t -> ('a, 'b) t array

split ~axis parts x splits an ndarray x into parts along the specified axis. This function is the inverse operation of concatenate. The elements in x must sum up to the dimension in the specified axis.

val split_vh : (int * int) array array -> ('a, 'b) t -> ('a, 'b) t array array

split_vh parts x splits a passed in ndarray x along the first two dimensions, i.e. axis 0 and axis 1. This is the inverse operation of concat_vh function, and the function is very useful in dividing a big matrix into smaller (especially heterogeneous) parts.

For example, given a matrix x of shape [|8;10|], it is possible to split in the following ways.

.. code-block:: ocaml

  Mat.split_vh [| [|(8,5);(8,5)|] |] x;;
  Mat.split_vh [| [|(4,5);(4,5)|]; [|(4,10)|] |] x;;
  Mat.split_vh [| [|(4,5);(4,5)|]; [|(4,5);(4,5)|] |] x;;

val squeeze : ?axis:int array -> ('a, 'b) t -> ('a, 'b) t

squeeze ~axis x removes single-dimensional entries from the shape of x.

val expand : ?hi:bool -> ('a, 'b) t -> int -> ('a, 'b) t

expand x d reshapes x by increasing its rank from num_dims x to d. The opposite operation is squeeze x. The hi parameter is used to specify whether the expansion is along the high dimensions (by setting true), or along the low dimensions (by setting false). The default value is false.

val pad : ?v:'a -> int list list -> ('a, 'b) t -> ('a, 'b) t

pad ~v p x pads an ndarray x with a constant value v. The padding index p is a list of lists of 2 integers. These two integers denote the padding width at both edges of one dimension of x.
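A small sketch of the padding widths (assuming the M alias used earlier):

.. code-block:: ocaml

  let x = M.ones Bigarray.Float64 [|2;3|] in
  (* one extra row at the top and bottom, two extra columns on the left only *)
  let y = M.pad ~v:0. [ [1;1]; [2;0] ] x in
  M.shape y  (* [|4;5|] *)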

val dropout : ?rate:float -> ('a, 'b) t -> ('a, 'b) t

dropout ~rate:0.3 x drops out 30% of the elements in x, in other words, sets their values to zero.

val top : ('a, 'b) t -> int -> int array array

top x n returns the indices of n greatest values of x. The indices are arranged according to the corresponding element values, from the greatest one to the smallest one.

val bottom : ('a, 'b) t -> int -> int array array

bottom x n returns the indices of n smallest values of x. The indices are arranged according to the corresponding element values, from the smallest one to the greatest one.

val sort1 : ?axis:int -> ('a, 'b) t -> ('a, 'b) t

sort1 ~axis x performs quicksort of the elements along specific axis in x. A new copy is returned as result, the original x remains intact.

val sort : ('a, 'b) t -> ('a, 'b) t

sort x performs quicksort of the elements in x. A new copy is returned as the result; the original x remains intact. If you want to perform in-place sorting, please use `sort_` instead.

val argsort : ('a, 'b) t -> (int64, Stdlib.Bigarray.int64_elt) t

argsort x returns the indices with which the elements in x are sorted in increasing order. Note that the returned index ndarray has the same shape as that of x, and the indices are 1D indices.

val draw : ?axis:int -> ('a, 'b) t -> int -> ('a, 'b) t * int array

draw ~axis x n draws n samples from x along the specified axis, with replacement. axis is set to zero by default. The return is a tuple of both samples and the indices of the selected samples.

val mmap : Unix.file_descr -> ?pos:int64 -> ('a, 'b) kind -> bool -> int array -> ('a, 'b) t

mmap fd kind layout shared dims ...

Iteration functions
val iteri : (int -> 'a -> unit) -> ('a, 'b) t -> unit

iteri f x applies function f to each element in x. Note that the 1d index is passed to function f; you need to convert it to an nd-index yourself.

val iter : ('a -> unit) -> ('a, 'b) t -> unit

iter f x is similar to iteri f x, except the index is not passed to f.

val mapi : (int -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t

mapi f x makes a copy of x, then applies f to each element in x.

val map : ('a -> 'a) -> ('a, 'b) t -> ('a, 'b) t

map f x is similar to mapi f x except the index is not passed.

val foldi : ?axis:int -> (int -> 'a -> 'a -> 'a) -> 'a -> ('a, 'b) t -> ('a, 'b) t

foldi ~axis f a x folds (or reduces) the elements in x from the left along the specified axis using the function f. a is the initial element. In f i acc b, acc is the accumulator and b is one of the elements in x along the same axis. Note that i is the 1d index of b.

val fold : ?axis:int -> ('a -> 'a -> 'a) -> 'a -> ('a, 'b) t -> ('a, 'b) t

Similar to foldi, except that the index of an element is not passed to f.

val scani : ?axis:int -> (int -> 'a -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t

scani ~axis f x scans x along the specified axis using the function f. f i acc a returns an updated acc which will be passed to the next call of f. This function can be used to implement accumulative operations such as the sum and prod functions. Note that i is the 1d index of a in x.

val scan : ?axis:int -> ('a -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t

Similar to scani, except that the index of an element is not passed to f.

val filteri : (int -> 'a -> bool) -> ('a, 'b) t -> int array

filteri f x uses f to filter out certain elements in x. An element will be included if f returns true. The returned result is an array of 1-dimensional indices of the selected elements. To obtain the n-dimensional indices, you need to convert it manually with Owl's helper function.

val filter : ('a -> bool) -> ('a, 'b) t -> int array

Similar to filteri, but the indices are not passed to f.
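As a quick sketch of filter (assuming the M alias used earlier):

.. code-block:: ocaml

  let x = M.sequential Bigarray.Float64 [|2;3|] in
  M.filter (fun a -> a > 3.) x
  (* = [|4;5|], the 1-d indices of the elements 4. and 5. *)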

val iter2i : (int -> 'a -> 'b -> unit) -> ('a, 'c) t -> ('b, 'd) t -> unit

Similar to iteri but applies to two N-dimensional arrays x and y. Both x and y must have the same shape.

val iter2 : ('a -> 'b -> unit) -> ('a, 'c) t -> ('b, 'd) t -> unit

Similar to iter2i, except that the index is not passed to f.

val map2i : (int -> 'a -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

map2i f x y applies f to two elements of the same position in both x and y. Note that 1d index is passed to function f.

val map2 : ('a -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

map2 f x y is similar to map2i f x y except the index is not passed.

val iteri_nd : (int array -> 'a -> unit) -> ('a, 'b) t -> unit

Similar to iteri but n-d indices are passed to the user function.

val mapi_nd : (int array -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t

Similar to mapi but n-d indices are passed to the user function.

val foldi_nd : ?axis:int -> (int array -> 'a -> 'a -> 'a) -> 'a -> ('a, 'b) t -> ('a, 'b) t

Similar to foldi but n-d indices are passed to the user function.

val scani_nd : ?axis:int -> (int array -> 'a -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t

Similar to scani but n-d indices are passed to the user function.

val filteri_nd : (int array -> 'a -> bool) -> ('a, 'b) t -> int array array

Similar to filteri but n-d indices are returned.

val iter2i_nd : (int array -> 'a -> 'c -> unit) -> ('a, 'b) t -> ('c, 'd) t -> unit

Similar to iter2i but n-d indices are passed to the user function.

val map2i_nd : (int array -> 'a -> 'a -> 'a) -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

Similar to map2i but n-d indices are passed to the user function.

val iteri_slice : ?axis:int -> (int -> ('a, 'b) t -> unit) -> ('a, 'b) t -> unit

iteri_slice ~axis f x iterates over the slices along the specified axis in x and applies the function f. The 1-d index of the slice is passed in. By default, the axis is 0. Setting axis to the highest dimension is not allowed because in that case you can just use `iteri` to iterate over all the elements in x, which is more efficient.

Note that the slice is obtained by slicing left (due to Owl's C-layout ndarray) a sub-array out of x. E.g., if x has shape [|3;4;5|], setting axis=0 will iterate three 4 x 5 matrices. The slice shares the same memory with x so no copy is made.

val iter_slice : ?axis:int -> (('a, 'b) t -> unit) -> ('a, 'b) t -> unit

Similar to iteri_slice but slice index is not passed in.

val mapi_slice : ?axis:int -> (int -> ('a, 'b) t -> 'c) -> ('a, 'b) t -> 'c array

mapi_slice ~axis f x maps the slices along the specified axis in x and applies the function f. By default, axis is 0. The index of the slice is passed in.

Please refer to iteri_slice for more details.

val map_slice : ?axis:int -> (('a, 'b) t -> 'c) -> ('a, 'b) t -> 'c array

Similar to mapi_slice but slice index is not passed in.

val filteri_slice : ?axis:int -> (int -> ('a, 'b) t -> bool) -> ('a, 'b) t -> ('a, 'b) t array

filteri_slice ~axis f x filters the slices along the specified axis in x. The slices which satisfy the predicate f are returned in an array.

Please refer to iteri_slice for more details.

val filter_slice : ?axis:int -> (('a, 'b) t -> bool) -> ('a, 'b) t -> ('a, 'b) t array

Similar to filteri_slice but slice index is not passed in.

val foldi_slice : ?axis:int -> (int -> 'c -> ('a, 'b) t -> 'c) -> 'c -> ('a, 'b) t -> 'c

foldi_slice ~axis f a x folds (from left) the slices along the specified axis in x using the function f, with a as the initial accumulator value.

Please refer to iteri_slice for more details.

val fold_slice : ?axis:int -> ('c -> ('a, 'b) t -> 'c) -> 'c -> ('a, 'b) t -> 'c

Similar to foldi_slice but slice index is not passed in.

Examination & Comparison
val exists : ('a -> bool) -> ('a, 'b) t -> bool

exists f x checks all the elements in x using f. If at least one element satisfies f then the function returns true otherwise false.

val not_exists : ('a -> bool) -> ('a, 'b) t -> bool

not_exists f x checks all the elements in x; the function returns true only if all the elements fail to satisfy f.

val for_all : ('a -> bool) -> ('a, 'b) t -> bool

for_all f x checks all the elements in x, the function returns true if and only if all the elements pass the check of function f.

val is_zero : ('a, 'b) t -> bool

is_zero x returns true if all the elements in x are zeros.

val is_positive : ('a, 'b) t -> bool

is_positive x returns true if all the elements in x are positive.

val is_negative : ('a, 'b) t -> bool

is_negative x returns true if all the elements in x are negative.

val is_nonpositive : ('a, 'b) t -> bool

is_nonpositive returns true if all the elements in x are non-positive.

val is_nonnegative : ('a, 'b) t -> bool

is_nonnegative returns true if all the elements in x are non-negative.

val is_normal : ('a, 'b) t -> bool

is_normal x returns true if all the elements in x are normal float numbers, i.e., not NaN, not INF, not SUBNORMAL. Please refer to

https://www.gnu.org/software/libc/manual/html_node/Floating-Point-Classes.html https://www.gnu.org/software/libc/manual/html_node/Infinity-and-NaN.html#Infinity-and-NaN

val not_nan : ('a, 'b) t -> bool

not_nan x returns false if there is any NaN element in x. Otherwise, the function returns true indicating all the numbers in x are not NaN.

val not_inf : ('a, 'b) t -> bool

not_inf x returns false if there is any positive or negative INF element in x. Otherwise, the function returns true.

val equal : ('a, 'b) t -> ('a, 'b) t -> bool

equal x y returns true if two matrices x and y are equal.

val not_equal : ('a, 'b) t -> ('a, 'b) t -> bool

not_equal x y returns true if there is at least one element in x is not equal to that in y.

val greater : ('a, 'b) t -> ('a, 'b) t -> bool

greater x y returns true if all the elements in x are greater than the corresponding elements in y.

val less : ('a, 'b) t -> ('a, 'b) t -> bool

less x y returns true if all the elements in x are smaller than the corresponding elements in y.

val greater_equal : ('a, 'b) t -> ('a, 'b) t -> bool

greater_equal x y returns true if all the elements in x are not smaller than the corresponding elements in y.

val less_equal : ('a, 'b) t -> ('a, 'b) t -> bool

less_equal x y returns true if all the elements in x are not greater than the corresponding elements in y.

val elt_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

elt_equal x y performs element-wise = comparison of x and y. Assume that a is from x and b is the corresponding element of a from y of the same position. The function returns another binary (0 and 1) ndarray/matrix wherein 1 indicates a = b.

The function supports broadcast operation.

val elt_not_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

elt_not_equal x y performs element-wise != comparison of x and y. Assume that a is from x and b is the corresponding element of a from y of the same position. The function returns another binary (0 and 1) ndarray/matrix wherein 1 indicates a <> b.

The function supports broadcast operation.

val elt_less : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

elt_less x y performs element-wise < comparison of x and y. Assume that a is from x and b is the corresponding element of a from y of the same position. The function returns another binary (0 and 1) ndarray/matrix wherein 1 indicates a < b.

The function supports broadcast operation.

val elt_greater : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

elt_greater x y performs element-wise > comparison of x and y. Assume that a is from x and b is the corresponding element of a from y of the same position. The function returns another binary (0 and 1) ndarray/matrix wherein 1 indicates a > b.

The function supports broadcast operation.

val elt_less_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

elt_less_equal x y performs element-wise <= comparison of x and y. Assume that a is from x and b is the corresponding element of a from y of the same position. The function returns another binary (0 and 1) ndarray/matrix wherein 1 indicates a <= b.

The function supports broadcast operation.

val elt_greater_equal : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

elt_greater_equal x y performs element-wise >= comparison of x and y. Assume that a is from x and b is the corresponding element of a from y of the same position. The function returns another binary (0 and 1) ndarray/matrix wherein 1 indicates a >= b.

The function supports broadcast operation.

val equal_scalar : ('a, 'b) t -> 'a -> bool

equal_scalar x a checks if all the elements in x are equal to a. The function returns true iff for every element b in x, b = a.

val not_equal_scalar : ('a, 'b) t -> 'a -> bool

not_equal_scalar x a checks if all the elements in x are not equal to a. The function returns true iff for every element b in x, b <> a.

val less_scalar : ('a, 'b) t -> 'a -> bool

less_scalar x a checks if all the elements in x are less than a. The function returns true iff for every element b in x, b < a.

val greater_scalar : ('a, 'b) t -> 'a -> bool

greater_scalar x a checks if all the elements in x are greater than a. The function returns true iff for every element b in x, b > a.

val less_equal_scalar : ('a, 'b) t -> 'a -> bool

less_equal_scalar x a checks if all the elements in x are less or equal to a. The function returns true iff for every element b in x, b <= a.

val greater_equal_scalar : ('a, 'b) t -> 'a -> bool

greater_equal_scalar x a checks if all the elements in x are greater or equal to a. The function returns true iff for every element b in x, b >= a.

val elt_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

elt_equal_scalar x a performs element-wise = comparison of x and a. Assume that b is one element from x. The function returns another binary (0 and 1) ndarray/matrix wherein 1 at the corresponding position indicates a = b, otherwise 0.

val elt_not_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

elt_not_equal_scalar x a performs element-wise != comparison of x and a. Assume that b is one element from x. The function returns another binary (0 and 1) ndarray/matrix wherein 1 at the corresponding position indicates a <> b, otherwise 0.

val elt_less_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

elt_less_scalar x a performs element-wise < comparison of x and a. Assume that b is one element from x. The function returns another binary (0 and 1) ndarray/matrix wherein 1 at the corresponding position indicates a < b, otherwise 0.

val elt_greater_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

elt_greater_scalar x a performs element-wise > comparison of x and a. Assume that b is one element from x. The function returns another binary (0 and 1) ndarray/matrix wherein 1 at the corresponding position indicates a > b, otherwise 0.

val elt_less_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

elt_less_equal_scalar x a performs element-wise <= comparison of x and a. Assume that b is one element from x. The function returns another binary (0 and 1) ndarray/matrix wherein 1 at the corresponding position indicates a <= b, otherwise 0.

val elt_greater_equal_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

elt_greater_equal_scalar x a performs element-wise >= comparison of x and a. Assume that b is one element from x. The function returns another binary (0 and 1) ndarray/matrix wherein 1 at the corresponding position indicates a >= b, otherwise 0.

val approx_equal : ?eps:float -> ('a, 'b) t -> ('a, 'b) t -> bool

approx_equal ~eps x y returns true if x and y are approximately equal, i.e., for any two elements a from x and b from y, we have abs (a - b) < eps. For complex numbers, the eps applies to both real and imaginary part.

Note: the threshold check is exclusive for passed in eps, i.e., the threshold interval is (a-eps, a+eps).

val approx_equal_scalar : ?eps:float -> ('a, 'b) t -> 'a -> bool

approx_equal_scalar ~eps x a returns true if all the elements in x are approximately equal to a, i.e., abs (x - a) < eps. For complex numbers, the eps applies to both real and imaginary parts.

Note: the threshold check is exclusive for the passed in eps.

val approx_elt_equal : ?eps:float -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

approx_elt_equal ~eps x y compares the element-wise equality of x and y, then returns another binary (i.e., 0 and 1) ndarray/matrix wherein 1 indicates that two corresponding elements a from x and b from y are considered as approximately equal, namely abs (a - b) < eps.

val approx_elt_equal_scalar : ?eps:float -> ('a, 'b) t -> 'a -> ('a, 'b) t

approx_elt_equal_scalar ~eps x a compares all the elements of x to a scalar value a, then returns another binary (i.e., 0 and 1) ndarray/matrix wherein 1 indicates that the element b from x is considered as approximately equal to a, namely abs (a - b) < eps.

Input/Output functions
val of_array : ('a, 'b) kind -> 'a array -> int array -> ('a, 'b) t

of_array k x d takes an array x and converts it into an ndarray of type k and shape d.

val to_array : ('a, 'b) t -> 'a array

to_array x converts an ndarray x to OCaml's array type. Note that the ndarray x is flattened before conversion.
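A round-trip sketch between OCaml arrays and ndarrays (assuming the M alias used earlier):

.. code-block:: ocaml

  let x = M.of_array Bigarray.Float64 [|1.;2.;3.;4.;5.;6.|] [|2;3|] in
  let a = M.to_array x in   (* [|1.;2.;3.;4.;5.;6.|], the flattened content *)
  Array.length a            (* 6 *)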

val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:('a -> string) -> ('a, 'b) t -> unit

print x prints all the elements in x as well as their indices. max_row and max_col specify the maximum number of rows and columns to display. header specifies whether or not to print out the headers. fmt is the function to format every element into string.

val pp_dsnda : Stdlib.Format.formatter -> ('a, 'b) t -> unit

pp_dsnda x prints x in OCaml toplevel. If the ndarray is too long, pp_dsnda only prints out parts of the ndarray.

val save : out:string -> ('a, 'b) t -> unit

save ~out x serialises an ndarray x to a file of name out.

val load : ('a, 'b) kind -> string -> ('a, 'b) t

load k s loads previously serialised ndarray from file s into memory. It is necessary to specify the type of the ndarray with parameter k.

val save_npy : out:string -> ('a, 'b) t -> unit

save_npy ~out x saves the matrix x into a npy file out. This function is implemented using npy-ocaml https://github.com/LaurentMazare/npy-ocaml.

val load_npy : ('a, 'b) kind -> string -> ('a, 'b) t

load_npy k file loads an npy file into a matrix of type k. If the matrix in the file is not of type k, it fails with [file]: incorrect format. This function is implemented using npy-ocaml https://github.com/LaurentMazare/npy-ocaml.

Unary math operators
val re_c2s : (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) t -> (float, Stdlib.Bigarray.float32_elt) t

re_c2s x returns all the real components of x in a new ndarray of same shape.

val re_z2d : (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) t -> (float, Stdlib.Bigarray.float64_elt) t

re_z2d x returns all the real components of x in a new ndarray of the same shape.

val im_c2s : (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) t -> (float, Stdlib.Bigarray.float32_elt) t

im_c2s x returns all the imaginary components of x in a new ndarray of same shape.

val im_z2d : (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) t -> (float, Stdlib.Bigarray.float64_elt) t

im_z2d x returns all the imaginary components of x in a new ndarray of the same shape.

val sum : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

sum ~axis x sums the elements in x along specified axis.

val sum' : ('a, 'b) t -> 'a

sum' x returns the sum of all elements in x.
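A small sketch contrasting the reduction along an axis with the full reduction (assuming the M alias used earlier; keep_dims is passed explicitly so the output shape is unambiguous):

.. code-block:: ocaml

  let x = M.sequential Bigarray.Float64 [|2;3|] in
  let s = M.sum ~axis:0 ~keep_dims:true x in
  (* s has shape [|1;3|] and contains 3., 5., 7. *)
  M.sum' x  (* 15. *)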

val sum_reduce : ?axis:int array -> ('a, 'b) t -> ('a, 'b) t

sum_reduce ~axis x sums the elements in x along multiple axes specified in the axis array.

val prod : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

prod ~axis x multiples the elements in x along specified axis.

val prod' : ('a, 'b) t -> 'a

prod' x returns the product of all elements in x.

val mean : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

mean ~axis x calculates the mean along specified axis.

val mean' : ('a, 'b) t -> 'a

mean' x calculates the mean of all the elements in x.

val median : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

median ~axis x calculates the median along specified axis of x.

val median' : ('a, 'b) t -> 'a

median x calculates the median of a flattened version of x.

val var : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

var ~axis x calculates the variance along specified axis.

val var' : ('a, 'b) t -> 'a

var' x calculates the variance of all the elements in x.

val std : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

std ~axis calculates the standard deviation along specified axis.

val std' : ('a, 'b) t -> 'a

std' x calculates the standard deviation of all the elements in x.

val sem : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

sem ~axis calculates the standard error of mean along specified axis.

val sem' : ('a, 'b) t -> 'a

sem' x calculates the standard error of mean of all the elements in x.

val min : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

min x returns the minimum of all elements in x along specified axis. If no axis is specified, x will be flattened and the minimum of all the elements will be returned. For two complex numbers, the one with the smaller magnitude will be selected. If two magnitudes are the same, the one with the smaller phase will be selected.

val min' : ('a, 'b) t -> 'a

min' x is similar to min but returns the minimum of all elements in x in scalar value.

val max : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

max x returns the maximum of all elements in x along specified axis. If no axis is specified, x will be flattened and the maximum of all the elements will be returned. For two complex numbers, the one with the greater magnitude will be selected. If two magnitudes are the same, the one with the greater phase will be selected.

val max' : ('a, 'b) t -> 'a

max' x is similar to max but returns the maximum of all elements in x in scalar value.

val minmax : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t

minmax ~axis x returns (min_v, max_v), where min_v contains the minimum values along the specified axis and max_v contains the maximum values.

val minmax' : ('a, 'b) t -> 'a * 'a

minmax' x returns (min_v, max_v), min_v is the minimum value in x while max_v is the maximum.

val min_i : ('a, 'b) t -> 'a * int array

min_i x returns the minimum of all elements in x as well as its index.

val max_i : ('a, 'b) t -> 'a * int array

max_i x returns the maximum of all elements in x as well as its index.

val minmax_i : ('a, 'b) t -> ('a * int array) * ('a * int array)

minmax_i x returns ((min_v,min_i), (max_v,max_i)) where (min_v,min_i) is the minimum value in x along with its index while (max_v,max_i) is the maximum value along with its index.

val abs : ('a, 'b) t -> ('a, 'b) t

abs x returns the absolute value of all elements in x in a new ndarray.

val abs_c2s : (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) t -> (float, Stdlib.Bigarray.float32_elt) t

abs_c2s x is similar to abs but takes complex32 as input.

val abs_z2d : (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) t -> (float, Stdlib.Bigarray.float64_elt) t

abs_z2d x is similar to abs but takes complex64 as input.

val abs2 : ('a, 'b) t -> ('a, 'b) t

abs2 x returns the square of absolute value of all elements in x in a new ndarray.

val abs2_c2s : (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) t -> (float, Stdlib.Bigarray.float32_elt) t

abs2_c2s x is similar to abs2 but takes complex32 as input.

val abs2_z2d : (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) t -> (float, Stdlib.Bigarray.float64_elt) t

abs2_z2d x is similar to abs2 but takes complex64 as input.

val conj : ('a, 'b) t -> ('a, 'b) t

conj x returns the conjugate of the complex x.

val neg : ('a, 'b) t -> ('a, 'b) t

neg x negates the elements in x and returns the result in a new ndarray.

val reci : ('a, 'b) t -> ('a, 'b) t

reci x computes the reciprocal of every elements in x and returns the result in a new ndarray.

val reci_tol : ?tol:'a -> ('a, 'b) t -> ('a, 'b) t

reci_tol ~tol x computes the reciprocal of every element in x. Different from reci, reci_tol sets the elements whose absolute values are smaller than tol to zeros. If tol is not specified, the default Owl_utils.eps Float32 will be used. For complex numbers, refer to Owl's documentation on how comparison is defined.

val signum : (float, 'a) t -> (float, 'a) t

signum computes the sign value (-1 for negative numbers, 0 (or -0) for zero, 1 for positive numbers, nan for nan).

val sqr : ('a, 'b) t -> ('a, 'b) t

sqr x computes the square of the elements in x and returns the result in a new ndarray.

val sqrt : ('a, 'b) t -> ('a, 'b) t

sqrt x computes the square root of the elements in x and returns the result in a new ndarray.

val cbrt : ('a, 'b) t -> ('a, 'b) t

cbrt x computes the cubic root of the elements in x and returns the result in a new ndarray.

val exp : ('a, 'b) t -> ('a, 'b) t

exp x computes the exponential of the elements in x and returns the result in a new ndarray.

val exp2 : ('a, 'b) t -> ('a, 'b) t

exp2 x computes the base-2 exponential of the elements in x and returns the result in a new ndarray.

val exp10 : ('a, 'b) t -> ('a, 'b) t

exp10 x computes the base-10 exponential of the elements in x and returns the result in a new ndarray.

val expm1 : ('a, 'b) t -> ('a, 'b) t

expm1 x computes exp x -. 1. of the elements in x and returns the result in a new ndarray.

val log : ('a, 'b) t -> ('a, 'b) t

log x computes the logarithm of the elements in x and returns the result in a new ndarray.

val log10 : ('a, 'b) t -> ('a, 'b) t

log10 x computes the base-10 logarithm of the elements in x and returns the result in a new ndarray.

val log2 : ('a, 'b) t -> ('a, 'b) t

log2 x computes the base-2 logarithm of the elements in x and returns the result in a new ndarray.

val log1p : ('a, 'b) t -> ('a, 'b) t

log1p x computes log (1 + x) of the elements in x and returns the result in a new ndarray.

val sin : ('a, 'b) t -> ('a, 'b) t

sin x computes the sine of the elements in x and returns the result in a new ndarray.

val cos : ('a, 'b) t -> ('a, 'b) t

cos x computes the cosine of the elements in x and returns the result in a new ndarray.

val tan : ('a, 'b) t -> ('a, 'b) t

tan x computes the tangent of the elements in x and returns the result in a new ndarray.

val asin : ('a, 'b) t -> ('a, 'b) t

asin x computes the arc sine of the elements in x and returns the result in a new ndarray.

val acos : ('a, 'b) t -> ('a, 'b) t

acos x computes the arc cosine of the elements in x and returns the result in a new ndarray.

val atan : ('a, 'b) t -> ('a, 'b) t

atan x computes the arc tangent of the elements in x and returns the result in a new ndarray.

val sinh : ('a, 'b) t -> ('a, 'b) t

sinh x computes the hyperbolic sine of the elements in x and returns the result in a new ndarray.

val cosh : ('a, 'b) t -> ('a, 'b) t

cosh x computes the hyperbolic cosine of the elements in x and returns the result in a new ndarray.

val tanh : ('a, 'b) t -> ('a, 'b) t

tanh x computes the hyperbolic tangent of the elements in x and returns the result in a new ndarray.

val asinh : ('a, 'b) t -> ('a, 'b) t

asinh x computes the hyperbolic arc sine of the elements in x and returns the result in a new ndarray.

val acosh : ('a, 'b) t -> ('a, 'b) t

acosh x computes the hyperbolic arc cosine of the elements in x and returns the result in a new ndarray.

val atanh : ('a, 'b) t -> ('a, 'b) t

atanh x computes the hyperbolic arc tangent of the elements in x and returns the result in a new ndarray.

val floor : ('a, 'b) t -> ('a, 'b) t

floor x computes the floor of the elements in x and returns the result in a new ndarray.

val ceil : ('a, 'b) t -> ('a, 'b) t

ceil x computes the ceiling of the elements in x and returns the result in a new ndarray.

val round : ('a, 'b) t -> ('a, 'b) t

round x rounds the elements in x and returns the result in a new ndarray.

val trunc : ('a, 'b) t -> ('a, 'b) t

trunc x computes the truncation of the elements in x and returns the result in a new ndarray.

val fix : ('a, 'b) t -> ('a, 'b) t

fix x rounds each element of x to the nearest integer toward zero. For positive elements, the behavior is the same as floor. For negative ones, the behavior is the same as ceil.

val modf : ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t

modf x performs modf over all the elements in x; the fractional part is saved in the first element of the returned tuple whereas the integer part is saved in the second element.

val erf : (float, 'a) t -> (float, 'a) t

erf x computes the error function of the elements in x and returns the result in a new ndarray.

val erfc : (float, 'a) t -> (float, 'a) t

erfc x computes the complementary error function of the elements in x and returns the result in a new ndarray.

val logistic : (float, 'a) t -> (float, 'a) t

logistic x computes the logistic function 1 / (1 + exp(-x)) of the elements in x and returns the result in a new ndarray.

val relu : (float, 'a) t -> (float, 'a) t

relu x computes the rectified linear unit function max(x, 0) of the elements in x and returns the result in a new ndarray.

val elu : ?alpha:float -> (float, 'a) t -> (float, 'a) t

elu alpha x computes the exponential linear unit function x >= 0. ? x : (alpha * (exp(x) - 1)) of the elements in x and returns the result in a new ndarray.

val leaky_relu : ?alpha:float -> (float, 'a) t -> (float, 'a) t

leaky_relu alpha x computes the leaky rectified linear unit function x >= 0. ? x : (alpha * x) of the elements in x and returns the result in a new ndarray.

val softplus : (float, 'a) t -> (float, 'a) t

softplus x computes the softplus function log(1 + exp(x)) of the elements in x and returns the result in a new ndarray.

val softsign : (float, 'a) t -> (float, 'a) t

softsign x computes the softsign function x / (1 + abs(x)) of the elements in x and returns the result in a new ndarray.

val softmax : ?axis:int -> (float, 'a) t -> (float, 'a) t

softmax x computes the softmax functions (exp x) / (sum (exp x)) of all the elements along the specified axis in x and returns the result in a new ndarray.

By default, axis = -1, i.e. along the highest dimension.

val sigmoid : (float, 'a) t -> (float, 'a) t

sigmoid x computes the sigmoid function 1 / (1 + exp (-x)) for each element in x.

val log_sum_exp' : (float, 'a) t -> float

log_sum_exp x computes the logarithm of the sum of exponentials of all the elements in x.

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> (float, 'a) t -> (float, 'a) t

log_sum_exp ~axis x computes the logarithm of the sum of exponentials of all the elements in x along axis axis.

val l1norm : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

l1norm x calculates the l1-norm of x along the specified axis.

val l1norm' : ('a, 'b) t -> 'a

l1norm' x calculates the l1-norm of all the elements in x.

val l2norm : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

l2norm x calculates the l2-norm of x along the specified axis.

val l2norm' : ('a, 'b) t -> 'a

l2norm' x calculates the l2-norm of all the elements in x.

val l2norm_sqr : ?axis:int -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

l2norm_sqr x calculates the squared l2-norm of x along the specified axis.

val l2norm_sqr' : ('a, 'b) t -> 'a

l2norm_sqr' x calculates the square of the l2-norm (Euclidean norm) of all elements in x. The function uses conjugate transpose in the product, hence it always returns a float number.

val vecnorm : ?axis:int -> ?p:float -> ?keep_dims:bool -> ('a, 'b) t -> ('a, 'b) t

vecnorm ~axis ~p x calculates the generalised vector p-norm along the specified axis. The generalised p-norm is defined as below.

.. math:: ||v||_p = \Big( \sum_{k=0}^{N-1} |v_k|^p \Big)^{1/p}

Parameters: * axis is the axis for reduction. * p is order of norm, default value is 2. * x is the input ndarray.

Returns: * If p = infinity, then returns :math:`||v||_{\infty} = \max_i(|v(i)|)`. * If p = -infinity, then returns :math:`||v||_{-\infty} = \min_i(|v(i)|)`. * Otherwise returns the generalised vector p-norm defined above.

val vecnorm' : ?p:float -> ('a, 'b) t -> 'a

vecnorm' flattens the input into a 1-d vector first, then calculates the generalised p-norm the same as vecnorm.

val cumsum : ?axis:int -> ('a, 'b) t -> ('a, 'b) t

cumsum ~axis x performs cumulative sum of the elements along the given axis ~axis. If ~axis is not specified, then the cumsum is performed along the lowest dimension. The returned result, however, always has the same shape as the input.
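For example, a minimal sketch (assuming the M alias used earlier):

.. code-block:: ocaml

  let x = M.sequential Bigarray.Float64 [|2;3|] in
  M.cumsum ~axis:1 x
  (* each row becomes its running sum: 0., 1., 3. and 3., 7., 12. *)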

val cumprod : ?axis:int -> ('a, 'b) t -> ('a, 'b) t

cumprod ~axis x : similar to cumsum but performs cumulative product of the elements along the given ~axis.

val cummin : ?axis:int -> ('a, 'b) t -> ('a, 'b) t

cummin ~axis x : performs cumulative min along axis dimension.

val cummax : ?axis:int -> ('a, 'b) t -> ('a, 'b) t

cummax ~axis x : performs cumulative max along axis dimension.

val diff : ?axis:int -> ?n:int -> ('a, 'b) t -> ('a, 'b) t

diff ~axis ~n x calculates the n-th difference of x along the specified axis.

Parameters: * axis: axis to calculate the difference. The default value is the highest dimension. * n: how many times to calculate the difference. The default value is 1.

Return: * The difference ndarray y. Note that the shape of y is 1 less than that of x along the specified axis.

val angle : (Stdlib.Complex.t, 'a) t -> (Stdlib.Complex.t, 'a) t

angle x calculates the phase angle of all complex numbers in x.

val proj : (Stdlib.Complex.t, 'a) t -> (Stdlib.Complex.t, 'a) t

proj x computes the projection on the Riemann sphere of all elements in x.

val lgamma : ('a, 'b) t -> ('a, 'b) t

lgamma x computes the loggamma of the elements in x and returns the result in a new ndarray.

val dawsn : ('a, 'b) t -> ('a, 'b) t

dawsn x computes the Dawson function of the elements in x and returns the result in a new ndarray.

val i0 : ('a, 'b) t -> ('a, 'b) t

i0 x computes the modified Bessel function of order 0 of the elements in x and returns the result in a new ndarray.

val i0e : ('a, 'b) t -> ('a, 'b) t

i0e x computes the exponentially scaled modified Bessel function of order 0 of the elements in x and returns the result in a new ndarray.

val i1 : ('a, 'b) t -> ('a, 'b) t

i1 x computes the modified Bessel function of order 1 of the elements in x and returns the result in a new ndarray.

val i1e : ('a, 'b) t -> ('a, 'b) t

i1e x computes the exponentially scaled modified Bessel function of order 1 of the elements in x and returns the result in a new ndarray.

val iv : v:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

iv v x computes modified Bessel function of x of real order v

val scalar_iv : v:'a -> ('a, 'b) t -> ('a, 'b) t

scalar_iv v x computes the modified Bessel function of x of real order v.

val iv_scalar : v:('a, 'b) t -> 'a -> ('a, 'b) t

iv_scalar v x computes modified Bessel function of x of real order v

val j0 : ('a, 'b) t -> ('a, 'b) t

j0 x computes the Bessel function of order 0 of the elements in x and returns the result in a new ndarray.

val j1 : ('a, 'b) t -> ('a, 'b) t

j1 x computes the Bessel function of order 1 of the elements in x and returns the result in a new ndarray.

val jv : v:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

jv v x computes Bessel function the first kind of x of real order v

val scalar_jv : v:'a -> ('a, 'b) t -> ('a, 'b) t

scalar_jv v x computes the Bessel function of the first kind of x of real order v.

val jv_scalar : v:('a, 'b) t -> 'a -> ('a, 'b) t

jv_scalar v x computes Bessel function of the first kind of x of real order v

Binary math operators
val add : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

add x y adds all the elements in x and y elementwise, and returns the result in a new ndarray.

General broadcast operation is automatically applied to add/sub/mul/div, etc. The function compares the dimension element-wise from the highest to the lowest with the following broadcast rules (same as numpy): 1. equal; 2. either is 1.
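A minimal broadcasting sketch (assuming the M alias used earlier):

.. code-block:: ocaml

  let x = M.ones Bigarray.Float64 [|2;3|] in
  let y = M.sequential Bigarray.Float64 [|1;3|] in
  let z = M.add x y in
  (* y is broadcast along axis 0, so z has shape [|2;3|] *)
  M.shape z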

val sub : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

sub x y subtracts all the elements in x and y elementwise, and returns the result in a new ndarray.

val mul : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

mul x y multiplies all the elements in x and y elementwise, and returns the result in a new ndarray.

val div : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

div x y divides all the elements in x and y elementwise, and returns the result in a new ndarray.

val add_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

add_scalar x a adds a scalar value a to each element in x, and returns the result in a new ndarray.

val sub_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

sub_scalar x a subtracts a scalar value a from each element in x, and returns the result in a new ndarray.

val mul_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

mul_scalar x a multiplies each element in x by a scalar value a, and returns the result in a new ndarray.

val div_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

div_scalar x a divides each element in x by a scalar value a, and returns the result in a new ndarray.

val scalar_add : 'a -> ('a, 'b) t -> ('a, 'b) t

scalar_add a x adds a scalar value a to each element in x, and returns the result in a new ndarray.

val scalar_sub : 'a -> ('a, 'b) t -> ('a, 'b) t

scalar_sub a x subtracts each element in x from a scalar value a, and returns the result in a new ndarray.

val scalar_mul : 'a -> ('a, 'b) t -> ('a, 'b) t

scalar_mul a x multiplies each element in x by a scalar value a, and returns the result in a new ndarray.

val scalar_div : 'a -> ('a, 'b) t -> ('a, 'b) t

scalar_div a x divides a scalar value a by each element in x, and returns the result in a new ndarray.

val pow : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

pow x y computes pow(a, b) of all the elements in x and y elementwise, and returns the result in a new ndarray.

val scalar_pow : 'a -> ('a, 'b) t -> ('a, 'b) t

scalar_pow a x raises the scalar value a to the power of each element in x, element-wise.

val pow_scalar : ('a, 'b) t -> 'a -> ('a, 'b) t

pow_scalar x a raises each element in x to the power a.

val atan2 : (float, 'a) t -> (float, 'a) t -> (float, 'a) t

atan2 x y computes atan2(a, b) of all the elements in x and y elementwise, and returns the result in a new ndarray.

val scalar_atan2 : float -> (float, 'a) t -> (float, 'a) t

scalar_atan2 a x

val atan2_scalar : (float, 'a) t -> float -> (float, 'a) t

atan2_scalar x a

val hypot : (float, 'a) t -> (float, 'a) t -> (float, 'a) t

hypot x y computes sqrt(x*x + y*y) of all the elements in x and y elementwise, and returns the result in a new ndarray.

val min2 : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

min2 x y computes the minimum of all the elements in x and y elementwise, and returns the result in a new ndarray.

val max2 : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

max2 x y computes the maximum of all the elements in x and y elementwise, and returns the result in a new ndarray.

val fmod : (float, 'a) t -> (float, 'a) t -> (float, 'a) t

fmod x y performs float mod division.

val fmod_scalar : (float, 'a) t -> float -> (float, 'a) t

fmod_scalar x a performs mod division between x and scalar a.

val scalar_fmod : float -> (float, 'a) t -> (float, 'a) t

scalar_fmod a x performs mod division between scalar a and x.

val ssqr' : ('a, 'b) t -> 'a -> 'a

ssqr x a computes the sum of squared differences of all the elements in x from constant a. This function only computes the square of each element rather than the conjugate transpose as l2norm_sqr does.

val ssqr_diff' : ('a, 'b) t -> ('a, 'b) t -> 'a

ssqr_diff' x y computes the sum of squared differences of every element in x and its corresponding element in y.

val cross_entropy' : (float, 'a) t -> (float, 'a) t -> float

cross_entropy x y calculates the cross entropy between x and y using base e.

val clip_by_value : ?amin:'a -> ?amax:'a -> ('a, 'b) t -> ('a, 'b) t

clip_by_value ~amin ~amax x clips the elements in x based on amin and amax. The elements smaller than amin will be set to amin, and the elements greater than amax will be set to amax.
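A quick sketch (assuming the M alias used earlier):

.. code-block:: ocaml

  let x = M.sequential Bigarray.Float64 [|5|] in   (* 0., 1., 2., 3., 4. *)
  M.clip_by_value ~amin:1. ~amax:3. x
  (* = 1., 1., 2., 3., 3. *)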

val clip_by_l2norm : 'a -> ('a, 'b) t -> ('a, 'b) t

clip_by_l2norm t x clips x according to the l2-norm threshold t.

val fma : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

fma x y z calculates the `fused multiply add`, i.e. (x * y) + z.

Tensor Calculus
val contract1 : (int * int) array -> ('a, 'b) t -> ('a, 'b) t

contract1 index_pairs x performs index contraction (a.k.a. tensor contraction) on x. index_pairs is an array of pairs of indices of x that are contracted with each other.

Caveat: not well tested yet, use with care! Also, consider using TTGT in the future for better performance.

val contract2 : (int * int) array -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

contract2 index_pairs x y performs index contraction (a.k.a. tensor contraction) on two ndarrays x and y. index_pairs is an array of contracted index pairs: in each pair, the first element is an index of x and the second is an index of y.

Caveat: not well tested yet, use with care! Also, consider using TTGT in the future for better performance.
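As a sanity check, a minimal sketch (assuming the Owl.Arr alias): contracting the second index of x with the first index of y reproduces an ordinary matrix product.

(* assumes Owl.Arr *)
let () =
  let open Owl in
  let x = Arr.sequential [|2; 3|] in
  let y = Arr.sequential [|3; 4|] in
  let z = Arr.contract2 [| (1, 0) |] x y in   (* shape [|2; 4|], same as Arr.dot x y *)
  Arr.print z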

Cast functions
val cast : ('a, 'b) kind -> ('c, 'd) t -> ('a, 'b) t

cast kind x casts x of type ('c, 'd) t to type ('a, 'b) t as specified by the passed-in kind parameter. This function is a generalisation of the other casting functions such as cast_s2d, cast_c2z, etc.
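A minimal sketch (assuming Owl's generic module Dense.Ndarray.Generic): cast a float64 ndarray down to float32; cast_d2s is the specialised equivalent.

(* assumes Owl; Bigarray kinds select the element type *)
let () =
  let open Owl in
  let x = Dense.Ndarray.Generic.uniform Bigarray.Float64 [|2; 2|] in
  let y = Dense.Ndarray.Generic.cast Bigarray.Float32 x in
  Dense.Ndarray.Generic.print y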

val cast_s2d : - (float, Stdlib.Bigarray.float32_elt) t -> - (float, Stdlib.Bigarray.float64_elt) t

cast_s2d x casts x from float32 to float64.

val cast_d2s : - (float, Stdlib.Bigarray.float64_elt) t -> - (float, Stdlib.Bigarray.float32_elt) t

cast_d2s x casts x from float64 to float32.

val cast_c2z : - (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) t -> - (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) t

cast_c2z x casts x from complex32 to complex64.

val cast_z2c : - (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) t -> - (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) t

cast_z2c x casts x from complex64 to complex32.

val cast_s2c : - (float, Stdlib.Bigarray.float32_elt) t -> - (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) t

cast_s2c x casts x from float32 to complex32.

val cast_d2z : - (float, Stdlib.Bigarray.float64_elt) t -> - (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) t

cast_d2z x casts x from float64 to complex64.

val cast_s2z : - (float, Stdlib.Bigarray.float32_elt) t -> - (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) t

cast_s2z x casts x from float32 to complex64.

val cast_d2c : - (float, Stdlib.Bigarray.float64_elt) t -> - (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) t

cast_d2c x casts x from float64 to complex32.

val conv1d : - ?padding:Owl_types.padding -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t

TODO

val conv2d : - ?padding:Owl_types.padding -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t

TODO

val conv3d : - ?padding:Owl_types.padding -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t

TODO

val dilated_conv1d : - ?padding:Owl_types.padding -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t

TODO

val dilated_conv2d : - ?padding:Owl_types.padding -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t

TODO

val dilated_conv3d : - ?padding:Owl_types.padding -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t

TODO

val transpose_conv1d : - ?padding:Owl_types.padding -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t

TODO

val transpose_conv2d : - ?padding:Owl_types.padding -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t

TODO

val transpose_conv3d : - ?padding:Owl_types.padding -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t

TODO

val max_pool1d : - ?padding:Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t

TODO

val max_pool2d : - ?padding:Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t

TODO

val max_pool3d : - ?padding:Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t

TODO

val avg_pool1d : - ?padding:Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t

TODO

val avg_pool2d : - ?padding:Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t

TODO

val avg_pool3d : - ?padding:Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t

TODO

val max_pool2d_argmax : - ?padding:Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t * (int64, Stdlib.Bigarray.int64_elt) t

TODO

val upsampling2d : ('a, 'b) t -> int array -> ('a, 'b) t

TODO

val conv1d_backward_input : - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val conv1d_backward_kernel : - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val conv2d_backward_input : - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val conv2d_backward_kernel : - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val conv3d_backward_input : - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val conv3d_backward_kernel : - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val dilated_conv1d_backward_input : - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val dilated_conv1d_backward_kernel : - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val dilated_conv2d_backward_input : - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val dilated_conv2d_backward_kernel : - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val dilated_conv3d_backward_input : - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val dilated_conv3d_backward_kernel : - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val transpose_conv1d_backward_input : - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val transpose_conv1d_backward_kernel : - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val transpose_conv2d_backward_input : - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val transpose_conv2d_backward_kernel : - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val transpose_conv3d_backward_input : - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val transpose_conv3d_backward_kernel : - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val max_pool1d_backward : - Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val max_pool2d_backward : - Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val max_pool3d_backward : - Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val avg_pool1d_backward : - Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val avg_pool2d_backward : - Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val avg_pool3d_backward : - Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - ('a, 'b) t

TODO

val upsampling2d_backward : ('a, 'b) t -> int array -> ('a, 'b) t -> ('a, 'b) t

TODO

Helper functions

The following functions are helper functions for some other functions in both Ndarray and Ndview modules. In general, you are not supposed to use these functions directly.

val print_element : ('a, 'b) kind -> 'a -> unit

print_element kind a prints the value of a single element.

val print_index : int array -> unit

print_index i prints out the index of an element.

val _check_transpose_axis : int array -> int -> unit

_check_transpose_axis a d checks whether a is a legitimate transpose index.

val one_hot : int -> ('a, 'b) t -> ('a, 'b) t

one_hot depth idx creates one-hot vectors according to the indices ndarray idx and the specified depth. If idx is rank N, then the return is rank N+1. More specifically, if idx is of shape [|a;b;c|], the return is of shape [|a;b;c;depth|].
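A minimal sketch (assuming the Owl.Arr alias): three indices of rank 1 become a rank-2 ndarray of one-hot rows.

(* assumes Owl.Arr *)
let () =
  let open Owl in
  let idx = Arr.of_array [|0.; 2.; 1.|] [|3|] in
  let oh = Arr.one_hot 4 idx in   (* shape [|3; 4|] *)
  Arr.print oh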

val sum_slices : ?axis:int -> ('a, 'b) t -> ('a, 'b) t

sum_slices ~axis:2 x sums the slices defined by the axes before axis 2: for x of shape [|2;3;4;5|], it returns an ndarray of shape [|4;5|]. Currently the operation is implemented with gemm; it is fast but consumes more memory.
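A minimal sketch (assuming the Owl.Arr alias): with axis 2, the two leading dimensions are folded away.

(* assumes Owl.Arr *)
let () =
  let open Owl in
  let x = Arr.ones [|2; 3; 4; 5|] in
  let y = Arr.sum_slices ~axis:2 x in   (* shape [|4; 5|], every element 6. *)
  Arr.print y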

val slide : - ?axis:int -> - ?ofs:int -> - ?step:int -> - window:int -> - ('a, 'b) t -> - ('a, 'b) t

slide ~axis ~window x generates a new ndarray by sliding a window along the specified axis of x, as illustrated in the sketch after the parameter list. E.g., if x has shape [|a;b;c|] and axis = 1, then the returned ndarray has shape [|a; number of windows; window; c|].

Parameters:
axis is the axis for sliding; the default is -1, i.e. the highest dimension.
ofs is the starting position of the sliding window; the default is 0.
step is the step size; the default is 1.
window is the size of the sliding window.
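A minimal sketch (assuming the Owl.Arr alias): sliding a window of 3 over a 10-element vector with the default step of 1 yields 8 windows.

(* assumes Owl.Arr *)
let () =
  let open Owl in
  let x = Arr.sequential [|10|] in
  let y = Arr.slide ~window:3 x in   (* shape [|8; 3|] *)
  Arr.print y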

In-place modification
val create_ : out:('a, 'b) t -> 'a -> unit

TODO

val uniform_ : ?a:'a -> ?b:'a -> out:('a, 'b) t -> unit

TODO

val gaussian_ : ?mu:'a -> ?sigma:'a -> out:('a, 'b) t -> unit

TODO

val poisson_ : mu:float -> out:('a, 'b) t -> unit

TODO

val sequential_ : ?a:'a -> ?step:'a -> out:('a, 'b) t -> unit

TODO

val bernoulli_ : ?p:float -> out:('a, 'b) t -> unit

TODO

val zeros_ : out:('a, 'b) t -> unit

TODO

val ones_ : out:('a, 'b) t -> unit

TODO

val one_hot_ : out:('a, 'b) t -> int -> ('a, 'b) t -> unit

TODO

val sort_ : ('a, 'b) t -> unit

sort_ x performs in-place quicksort of the elements in x.

val get_fancy_ : out:('a, 'b) t -> Owl_types.index list -> ('a, 'b) t -> unit

TODO

val set_fancy_ : - out:('a, 'b) t -> - Owl_types.index list -> - ('a, 'b) t -> - ('a, 'b) t -> - unit

TODO

val get_slice_ : out:('a, 'b) t -> int list list -> ('a, 'b) t -> unit

TODO

val set_slice_ : - out:('a, 'b) t -> - int list list -> - ('a, 'b) t -> - ('a, 'b) t -> - unit

TODO

val copy_ : out:('a, 'b) t -> ('a, 'b) t -> unit

copy_ ~out src copies the data from ndarray src to destination out.

val reshape_ : out:('a, 'b) t -> ('a, 'b) t -> unit

TODO

val reverse_ : out:('a, 'b) t -> ('a, 'b) t -> unit

TODO

val transpose_ : out:('a, 'b) t -> ?axis:int array -> ('a, 'b) t -> unit

transpose_ ~out x is similar to transpose x but the output is written to out.

val repeat_ : out:('a, 'b) t -> ('a, 'b) t -> int array -> unit

repeat_ ~out x reps is similar to repeat x reps but the output is written to out.

val tile_ : out:('a, 'b) t -> ('a, 'b) t -> int array -> unit

tile_ ~out x reps is similar to tile x reps but the output is written to out.

val pad_ : out:('a, 'b) t -> ?v:'a -> int list list -> ('a, 'b) t -> unit

pad_ ~out ?v p x is similar to pad ?v p x but the output is written to out.

val sum_ : out:('a, 'b) t -> axis:int -> ('a, 'b) t -> unit

TODO

val min_ : out:('a, 'b) t -> axis:int -> ('a, 'b) t -> unit

TODO

val max_ : out:('a, 'b) t -> axis:int -> ('a, 'b) t -> unit

TODO

val add_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

add_ x y is similar to add function but the output is written to out. You need to make sure out is big enough to hold the output result.
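A minimal sketch (assuming the Owl.Arr alias): pre-allocate out once and reuse it, so the addition itself allocates no new ndarray.

(* assumes Owl.Arr; out must have the same shape as the result *)
let () =
  let open Owl in
  let x = Arr.ones [|3; 3|] in
  let y = Arr.create [|3; 3|] 2. in
  let out = Arr.empty [|3; 3|] in
  Arr.add_ ~out x y;   (* out now holds x + y *)
  Arr.print out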

val sub_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

sub_ x y is similar to sub function but the output is written to out. You need to make sure out is big enough to hold the output result.

val mul_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

mul_ x y is similar to mul function but the output is written to out. You need to make sure out is big enough to hold the output result.

val div_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

div_ x y is similar to div function but the output is written to out. You need to make sure out is big enough to hold the output result.

val pow_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

pow_ x y is similar to pow function but the output is written to out. You need to make sure out is big enough to hold the output result.

val atan2_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

atan2_ x y is similar to atan2 function but the output is written to out. You need to make sure out is big enough to hold the output result.

val hypot_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

hypot_ x y is similar to hypot function but the output is written to out. You need to make sure out is big enough to hold the output result.

val fmod_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

fmod_ x y is similar to fmod function but the output is written to out. You need to make sure out is big enough to hold the output result.

val min2_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

min2_ x y is similar to min2 function but the output is written to out. You need to make sure out is big enough to hold the output result.

val max2_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

max2_ x y is similar to max2 function but the output is written to out. You need to make sure out is big enough to hold the output result.

val add_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

add_scalar_ x y is similar to add_scalar function but the output is written to x.

val sub_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

sub_scalar_ x y is similar to sub_scalar function but the output is written to x.

val mul_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

mul_scalar_ x y is similar to mul_scalar function but the output is written to x.

val div_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

div_scalar_ x y is similar to div_scalar function but the output is written to x.

val pow_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

pow_scalar_ x y is similar to pow_scalar function but the output is written to x.

val atan2_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

atan2_scalar_ x y is similar to atan2_scalar function but the output is written to x.

val fmod_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

fmod_scalar_ x y is similar to fmod_scalar function but the output is written to x.

val scalar_add_ : ?out:('a, 'b) t -> 'a -> ('a, 'b) t -> unit

scalar_add_ a x is similar to scalar_add function but the output is written to x.

val scalar_sub_ : ?out:('a, 'b) t -> 'a -> ('a, 'b) t -> unit

scalar_sub_ a x is similar to scalar_sub function but the output is written to x.

val scalar_mul_ : ?out:('a, 'b) t -> 'a -> ('a, 'b) t -> unit

scalar_mul_ a x is similar to scalar_mul function but the output is written to x.

val scalar_div_ : ?out:('a, 'b) t -> 'a -> ('a, 'b) t -> unit

scalar_div_ a x is similar to scalar_div function but the output is written to x.

val scalar_pow_ : ?out:('a, 'b) t -> 'a -> ('a, 'b) t -> unit

scalar_pow_ a x is similar to scalar_pow function but the output is written to x.

val scalar_atan2_ : ?out:('a, 'b) t -> 'a -> ('a, 'b) t -> unit

scalar_atan2_ a x is similar to scalar_atan2 function but the output is written to x.

val scalar_fmod_ : ?out:('a, 'b) t -> 'a -> ('a, 'b) t -> unit

scalar_fmod_ a x is similar to scalar_fmod function but the output is written to x.

val clip_by_value_ : - ?out:('a, 'b) t -> - ?amin:'a -> - ?amax:'a -> - ('a, 'b) t -> - unit

TODO

val clip_by_l2norm_ : ?out:('a, 'b) t -> 'a -> ('a, 'b) t -> unit

TODO

val fma_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

fma_ ~out x y z is similar to fma x y z function but the output is written to out.

val dot_ : - ?transa:bool -> - ?transb:bool -> - ?alpha:'a -> - ?beta:'a -> - c:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - unit

Refer to :doc:`owl_dense_matrix_generic`

val conj_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

conj_ x is similar to conj but output is written to x

val abs_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

abs_ x is similar to abs but output is written to x

val neg_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

neg_ x is similar to neg but output is written to x

val reci_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

reci_ x is similar to reci but output is written to x

val signum_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

signum_ x is similar to signum but output is written to x

val sqr_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

sqr_ x is similar to sqr but output is written to x

val sqrt_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

sqrt_ x is similar to sqrt but output is written to x

val cbrt_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

cbrt_ x is similar to cbrt but output is written to x

val exp_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

exp_ x is similar to exp but output is written to x

val exp2_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

exp2_ x is similar to exp2 but output is written to x

val exp10_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

exp10_ x is similar to exp10 but output is written to x

val expm1_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

expm1_ x is similar to expm1 but output is written to x

val log_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

log_ x is similar to log but output is written to x

val log2_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

log2_ x is similar to log2 but output is written to x

val log10_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

log10_ x is similar to log10 but output is written to x

val log1p_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

log1p_ x is similar to log1p but output is written to x

val sin_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

sin_ x is similar to sin but output is written to x

val cos_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

cos_ x is similar to cos but output is written to x

val tan_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

tan_ x is similar to tan but output is written to x

val asin_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

asin_ x is similar to asin but output is written to x

val acos_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

acos_ x is similar to acos but output is written to x

val atan_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

atan_ x is similar to atan but output is written to x

val sinh_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

sinh_ x is similar to sinh but output is written to x

val cosh_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

cosh_ x is similar to cosh but output is written to x

val tanh_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

tanh_ x is similar to tanh but output is written to x

val asinh_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

asinh_ x is similar to asinh but output is written to x

val acosh_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

acosh_ x is similar to acosh but output is written to x

val atanh_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

atanh_ x is similar to atanh but output is written to x

val floor_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

floor_ x is similar to floor but output is written to x

val ceil_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

ceil_ x is similar to ceil but output is written to x

val round_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

round_ x is similar to round but output is written to x

val trunc_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

trunc_ x is similar to trunc but output is written to x

val fix_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

fix_ x is similar to fix but output is written to x

val erf_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

erf_ x is similar to erf but output is written to x

val erfc_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

erfc_ x is similar to erfc but output is written to x

val relu_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

relu_ x is similar to relu but output is written to x

val softplus_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

softplus_ x is similar to softplus but output is written to x

val softsign_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

softsign_ x is similar to softsign but output is written to x

val sigmoid_ : ?out:('a, 'b) t -> ('a, 'b) t -> unit

sigmoid_ x is similar to sigmoid but output is written to x

val softmax_ : ?out:('a, 'b) t -> ?axis:int -> ('a, 'b) t -> unit

softmax_ x is similar to softmax but output is written to x

val cumsum_ : ?out:('a, 'b) t -> ?axis:int -> ('a, 'b) t -> unit

cumsum_ x is similar to cumsum but output is written to x

val cumprod_ : ?out:('a, 'b) t -> ?axis:int -> ('a, 'b) t -> unit

cumprod_ x is similar to cumprod but output is written to x

val cummin_ : ?out:('a, 'b) t -> ?axis:int -> ('a, 'b) t -> unit

cummin_ x is similar to cummin but output is written to x

val cummax_ : ?out:('a, 'b) t -> ?axis:int -> ('a, 'b) t -> unit

cummax_ x is similar to cummax but output is written to x

val dropout_ : ?out:('a, 'b) t -> ?rate:float -> ('a, 'b) t -> unit

dropout_ x is similar to dropout but output is written to x

val elt_equal_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

elt_equal_ x y is similar to elt_equal function but the output is written to out. You need to make sure out is big enough to hold the output result.

val elt_not_equal_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

elt_not_equal_ x y is similar to elt_not_equal function but the output is written to out. You need to make sure out is big enough to hold the output result.

val elt_less_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

elt_less_ x y is similar to elt_less function but the output is written to out. You need to make sure out is big enough to hold the output result.

val elt_greater_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

elt_greater_ x y is similar to elt_greater function but the output is written to out. You need to make sure out is big enough to hold the output result.

val elt_less_equal_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

elt_less_equal_ x y is similar to elt_less_equal function but the output is written to out. You need to make sure out is big enough to hold the output result.

val elt_greater_equal_ : ?out:('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> unit

elt_greater_equal_ x y is similar to elt_greater_equal function but the output is written to out. You need to make sure out is big enough to hold the output result.

val elt_equal_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

elt_equal_scalar_ x a is similar to elt_equal_scalar function but the output is written to x.

val elt_not_equal_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

elt_not_equal_scalar_ x a is similar to elt_not_equal_scalar function but the output is written to x.

val elt_less_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

elt_less_scalar_ x a is similar to elt_less_scalar function but the output is written to x.

val elt_greater_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

elt_greater_scalar_ x a is similar to elt_greater_scalar function but the output is written to x.

val elt_less_equal_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

elt_less_equal_scalar_ x a is similar to elt_less_equal_scalar function but the output is written to x.

val elt_greater_equal_scalar_ : ?out:('a, 'b) t -> ('a, 'b) t -> 'a -> unit

elt_greater_equal_scalar_ x a is similar to elt_greater_equal_scalar function but the output is written to x.

val conv1d_ : - out:('a, 'b) t -> - ?padding:Owl_types.padding -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - unit

TODO

val conv2d_ : - out:('a, 'b) t -> - ?padding:Owl_types.padding -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - unit

TODO

val conv3d_ : - out:('a, 'b) t -> - ?padding:Owl_types.padding -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - unit

TODO

val dilated_conv1d_ : - out:('a, 'b) t -> - ?padding:Owl_types.padding -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - int array -> - unit

TODO

val dilated_conv2d_ : - out:('a, 'b) t -> - ?padding:Owl_types.padding -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - int array -> - unit

TODO

val dilated_conv3d_ : - out:('a, 'b) t -> - ?padding:Owl_types.padding -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - int array -> - unit

TODO

val transpose_conv1d_ : - out:('a, 'b) t -> - ?padding:Owl_types.padding -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - unit

TODO

val transpose_conv2d_ : - out:('a, 'b) t -> - ?padding:Owl_types.padding -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - unit

TODO

val transpose_conv3d_ : - out:('a, 'b) t -> - ?padding:Owl_types.padding -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - unit

TODO

val max_pool1d_ : - out:('a, 'b) t -> - ?padding:Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - unit

TODO

val max_pool2d_ : - out:('a, 'b) t -> - ?padding:Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - unit

TODO

val max_pool3d_ : - out:('a, 'b) t -> - ?padding:Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - unit

TODO

val avg_pool1d_ : - out:('a, 'b) t -> - ?padding:Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - unit

TODO

val avg_pool2d_ : - out:('a, 'b) t -> - ?padding:Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - unit

TODO

val avg_pool3d_ : - out:('a, 'b) t -> - ?padding:Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - unit

TODO

val upsampling2d_ : out:('a, 'b) t -> ('a, 'b) t -> int array -> unit

TODO

val conv1d_backward_input_ : - out:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - unit

TODO

val conv1d_backward_kernel_ : - out:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - unit

TODO

val conv2d_backward_input_ : - out:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - unit

TODO

val conv2d_backward_kernel_ : - out:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - unit

TODO

val conv3d_backward_input_ : - out:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - unit

TODO

val conv3d_backward_kernel_ : - out:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - unit

TODO

val dilated_conv1d_backward_input_ : - out:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - unit

TODO

val dilated_conv1d_backward_kernel_ : - out:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - unit

TODO

val dilated_conv2d_backward_input_ : - out:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - unit

TODO

val dilated_conv2d_backward_kernel_ : - out:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - unit

TODO

val dilated_conv3d_backward_input_ : - out:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - unit

TODO

val dilated_conv3d_backward_kernel_ : - out:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - unit

TODO

val transpose_conv1d_backward_input_ : - out:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - unit

TODO

val transpose_conv1d_backward_kernel_ : - out:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - unit

TODO

val transpose_conv2d_backward_input_ : - out:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - unit

TODO

val transpose_conv2d_backward_kernel_ : - out:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - unit

TODO

val transpose_conv3d_backward_input_ : - out:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - unit

TODO

val transpose_conv3d_backward_kernel_ : - out:('a, 'b) t -> - ('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - unit

TODO

val max_pool1d_backward_ : - out:('a, 'b) t -> - Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - unit

TODO

val max_pool2d_backward_ : - out:('a, 'b) t -> - Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - unit

TODO

val max_pool3d_backward_ : - out:('a, 'b) t -> - Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - unit

TODO

val avg_pool1d_backward_ : - out:('a, 'b) t -> - Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - unit

TODO

val avg_pool2d_backward_ : - out:('a, 'b) t -> - Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - unit

TODO

val avg_pool3d_backward_ : - out:('a, 'b) t -> - Owl_types.padding -> - ('a, 'b) t -> - int array -> - int array -> - ('a, 'b) t -> - unit

TODO

val upsampling2d_backward_ : - out:('a, 'b) t -> - ('a, 'b) t -> - int array -> - ('a, 'b) t -> - unit

TODO

val fused_adagrad_ : ?out:('a, 'b) t -> rate:'a -> eps:'a -> ('a, 'b) t -> unit

TODO

Matrix functions
type area = Owl_dense_ndarray_generic.area = {
  a : int;
  b : int;
  c : int;
  d : int;
}

Refer to :doc:`owl_dense_matrix_generic`

val area : int -> int -> int -> int -> area

Refer to :doc:`owl_dense_matrix_generic`

val copy_area_to : ('a, 'b) t -> area -> ('a, 'b) t -> area -> unit

Refer to :doc:`owl_dense_matrix_generic`

val row_num : ('a, 'b) t -> int

Refer to :doc:`owl_dense_matrix_generic`

val col_num : ('a, 'b) t -> int

Refer to :doc:`owl_dense_matrix_generic`

val row : ('a, 'b) t -> int -> ('a, 'b) t

Refer to :doc:`owl_dense_matrix_generic`

val col : ('a, 'b) t -> int -> ('a, 'b) t

Refer to :doc:`owl_dense_matrix_generic`

val rows : ('a, 'b) t -> int array -> ('a, 'b) t

Refer to :doc:`owl_dense_matrix_generic`

val cols : ('a, 'b) t -> int array -> ('a, 'b) t

Refer to :doc:`owl_dense_matrix_generic`

val copy_row_to : ('a, 'b) t -> ('a, 'b) t -> int -> unit

Refer to :doc:`owl_dense_matrix_generic`

val copy_col_to : ('a, 'b) t -> ('a, 'b) t -> int -> unit

Refer to :doc:`owl_dense_matrix_generic`

val dot : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_matrix_generic`
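A minimal sketch (assuming the Owl.Arr alias): dot treats the two ndarrays as matrices and returns their matrix product.

(* assumes Owl.Arr *)
let () =
  let open Owl in
  let a = Arr.sequential [|2; 3|] in
  let b = Arr.sequential [|3; 2|] in
  Arr.print (Arr.dot a b)   (* a 2 x 2 result *)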

val diag : ?k:int -> ('a, 'b) t -> ('a, 'b) t

Refer to :doc:`owl_dense_matrix_generic`

val trace : ('a, 'b) t -> 'a

Refer to :doc:`owl_dense_matrix_generic`

val to_rows : ('a, 'b) t -> ('a, 'b) t array

Refer to :doc:`owl_dense_matrix_generic`

val of_rows : ('a, 'b) t array -> ('a, 'b) t

Refer to :doc:`owl_dense_matrix_generic`

val to_cols : ('a, 'b) t -> ('a, 'b) t array

Refer to :doc:`owl_dense_matrix_generic`

val of_cols : ('a, 'b) t array -> ('a, 'b) t

Refer to :doc:`owl_dense_matrix_generic`

val to_arrays : ('a, 'b) t -> 'a array array

Refer to :doc:`owl_dense_matrix_generic`

val of_arrays : ('a, 'b) kind -> 'a array array -> ('a, 'b) t

Refer to :doc:`owl_dense_matrix_generic`

val draw_rows : - ?replacement:bool -> - ('a, 'b) t -> - int -> - ('a, 'b) t * int array

Refer to :doc:`owl_dense_matrix_generic`

val draw_cols : - ?replacement:bool -> - ('a, 'b) t -> - int -> - ('a, 'b) t * int array

Refer to :doc:`owl_dense_matrix_generic`

val draw_rows2 : - ?replacement:bool -> - ('a, 'b) t -> - ('a, 'b) t -> - int -> - ('a, 'b) t * ('a, 'b) t * int array

Refer to :doc:`owl_dense_matrix_generic`

val draw_cols2 : - ?replacement:bool -> - ('a, 'b) t -> - ('a, 'b) t -> - int -> - ('a, 'b) t * ('a, 'b) t * int array

Refer to :doc:`owl_dense_matrix_generic`

Helper functions
val float_to_elt : 'a -> 'a

Identity function to deal with the type conversion required by other functors.

val elt_to_float : 'a -> 'a

Identity function to deal with the type conversion required by other functors.

include module type of struct include Operator end
include sig ... end
val (+$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (-$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (*$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (/$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val ($+) : - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t
val ($-) : - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t
val ($*) : - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t
val ($/) : - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (=) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - bool
val (!=) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - bool
val (<>) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - bool
val (>) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - bool
val (<) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - bool
val (>=) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - bool
val (<=) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - bool
include sig ... end
val (=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (!=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<>$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (>$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (>=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (=.$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (!=.$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (<>.$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (<.$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (>.$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (<=.$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (>=.$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (=~) : - ?eps:float -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - bool
val (=~$) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (=~.) : - ?eps:float -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (=~.$) : - ?eps:float -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (%) : - (float, 'a) Owl_dense_ndarray_generic.t -> - (float, 'a) Owl_dense_ndarray_generic.t -> - (float, 'a) Owl_dense_ndarray_generic.t
val (%$) : - (float, 'a) Owl_dense_ndarray_generic.t -> - float -> - (float, 'a) Owl_dense_ndarray_generic.t
val (**) : - (float, 'a) Owl_dense_ndarray_generic.t -> - (float, 'a) Owl_dense_ndarray_generic.t -> - (float, 'a) Owl_dense_ndarray_generic.t
val ($**) : - float -> - (float, 'a) Owl_dense_ndarray_generic.t -> - (float, 'a) Owl_dense_ndarray_generic.t
val (**$) : - (float, 'a) Owl_dense_ndarray_generic.t -> - float -> - (float, 'a) Owl_dense_ndarray_generic.t
val (+=) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - unit
val (-=) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - unit
val (*=) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - unit
val (/=) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - unit
val (+$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (-$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (*$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (/$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (.!{;..}) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - Owl_types.index array -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (.!{;..}<-) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - Owl_types.index array -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - unit
val (.${}) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - int list -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (.${;..}) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - int list array -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (.${}<-) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - int list -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - unit
val (.${;..}<-) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - int list array -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - unit
include sig ... end
val (.%{}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int -> 'a
val (.%{;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int -> 'a -> unit
val (.%{;..}<-) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - int array -> - 'a -> - unit
val mpow : - ('a, 'b) Owl_linalg_generic.t -> - float -> - ('a, 'b) Owl_linalg_generic.t
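A minimal sketch (assuming the Owl.Arr alias, which includes this Operator module): the $-suffixed operators take the scalar argument on the side of the $ sign.

(* assumes Owl.Arr; a local open brings the infix operators into scope *)
let () =
  let open Owl in
  let y = Arr.((sequential [|2; 3|] *$ 10.) +$ 1.) in   (* scale then shift every element *)
  Printf.printf "%g\n" (Arr.get y [|1; 2|])             (* element at index [|1; 2|] *)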
\ No newline at end of file
diff --git a/owl/Owl_dense_ndarray/Operator/index.html b/owl/Owl_dense_ndarray/Operator/index.html
deleted file mode 100644
index b5a184463..000000000
--- a/owl/Owl_dense_ndarray/Operator/index.html
+++ /dev/null
@@ -1,171 +0,0 @@
Operator (owl.Owl_dense_ndarray.Operator)

Module Owl_dense_ndarray.Operator

include sig ... end
val (+$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (-$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (*$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (/$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val ($+) : - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t
val ($-) : - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t
val ($*) : - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t
val ($/) : - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (=) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - bool
val (!=) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - bool
val (<>) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - bool
val (>) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - bool
val (<) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - bool
val (>=) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - bool
val (<=) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - bool
include sig ... end
val (=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (!=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<>$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (>$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (>=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (=.$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (!=.$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (<>.$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (<.$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (>.$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (<=.$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (>=.$) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (=~) : - ?eps:float -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - bool
val (=~$) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (=~.) : - ?eps:float -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (=~.$) : - ?eps:float -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - 'a -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (%) : - (float, 'a) Owl_dense_ndarray_generic.t -> - (float, 'a) Owl_dense_ndarray_generic.t -> - (float, 'a) Owl_dense_ndarray_generic.t
val (%$) : - (float, 'a) Owl_dense_ndarray_generic.t -> - float -> - (float, 'a) Owl_dense_ndarray_generic.t
val (**) : - (float, 'a) Owl_dense_ndarray_generic.t -> - (float, 'a) Owl_dense_ndarray_generic.t -> - (float, 'a) Owl_dense_ndarray_generic.t
val ($**) : - float -> - (float, 'a) Owl_dense_ndarray_generic.t -> - (float, 'a) Owl_dense_ndarray_generic.t
val (**$) : - (float, 'a) Owl_dense_ndarray_generic.t -> - float -> - (float, 'a) Owl_dense_ndarray_generic.t
val (+=) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - unit
val (-=) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - unit
val (*=) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - unit
val (/=) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - unit
val (+$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (-$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (*$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (/$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (.!{;..}) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - Owl_types.index array -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (.!{;..}<-) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - Owl_types.index array -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - unit
val (.${}) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - int list -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (.${;..}) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - int list array -> - ('a, 'b) Owl_dense_ndarray_generic.t
val (.${}<-) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - int list -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - unit
val (.${;..}<-) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - int list array -> - ('a, 'b) Owl_dense_ndarray_generic.t -> - unit
include sig ... end
val (.%{}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int -> 'a
val (.%{;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int -> 'a -> unit
val (.%{;..}<-) : - ('a, 'b) Owl_dense_ndarray_generic.t -> - int array -> - 'a -> - unit
\ No newline at end of file
diff --git a/owl/Owl_dense_ndarray/S/index.html b/owl/Owl_dense_ndarray/S/index.html
deleted file mode 100644
index d01a50c69..000000000
--- a/owl/Owl_dense_ndarray/S/index.html
+++ /dev/null
@@ -1,579 +0,0 @@
S (owl.Owl_dense_ndarray.S)

Module Owl_dense_ndarray.S

include module type of struct include Owl_dense_ndarray_s end
type elt = float
type arr = - (float, Stdlib.Bigarray.float32_elt, Stdlib.Bigarray.c_layout) - Stdlib.Bigarray.Genarray.t
include Owl_dense_ndarray_intf.Common with type elt := elt and type arr := arr
include Owl_base_dense_ndarray_intf.Common - with type elt := elt - with type arr := arr
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:float -> int array -> arr
val shape : arr -> int array
val numel : arr -> int
val strides : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val slice_size : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val flatten : arr -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val squeeze : ?axis:int array -> arr -> arr
val expand : ?hi:bool -> arr -> int -> arr
val split : ?axis:int -> int array -> arr -> arr array
val draw : ?axis:int -> arr -> int -> arr * int array
val pad : ?v:elt -> int list list -> arr -> arr
val one_hot : int -> arr -> arr
val print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(elt -> string) -> - arr -> - unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
Iterate array elements
val iteri : (int -> elt -> unit) -> arr -> unit
val iter : (elt -> unit) -> arr -> unit
val mapi : (int -> elt -> elt) -> arr -> arr
val map : (elt -> elt) -> arr -> arr
val filteri : (int -> elt -> bool) -> arr -> int array
val filter : (elt -> bool) -> arr -> int array
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
Examination & Comparison
val exists : (elt -> bool) -> arr -> bool
val not_exists : (elt -> bool) -> arr -> bool
val for_all : (elt -> bool) -> arr -> bool
val is_zero : arr -> bool
val is_positive : arr -> bool
val is_negative : arr -> bool
val is_nonpositive : arr -> bool
val is_nonnegative : arr -> bool
val is_normal : arr -> bool
val not_nan : arr -> bool
val not_inf : arr -> bool
val equal : arr -> arr -> bool
val not_equal : arr -> arr -> bool
val greater : arr -> arr -> bool
val less : arr -> arr -> bool
val greater_equal : arr -> arr -> bool
val less_equal : arr -> arr -> bool
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val equal_scalar : arr -> elt -> bool
val not_equal_scalar : arr -> elt -> bool
val less_scalar : arr -> elt -> bool
val greater_scalar : arr -> elt -> bool
val less_equal_scalar : arr -> elt -> bool
val greater_equal_scalar : arr -> elt -> bool
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val diag : ?k:int -> arr -> arr
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
Create N-dimensional array
val linspace : elt -> elt -> int -> arr

linspace 0. 9. 10 ...

val logspace : ?base:float -> elt -> elt -> int -> arr

logspace 0. 9. 10 ...

val unit_basis : int -> int -> arr

unit_basis n i returns a unit basis vector of length n with the i-th element set to 1.
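A minimal sketch (assuming Owl's single-precision module Dense.Ndarray.S, which this interface describes): a length-5 basis vector whose element at index 2 is 1.

(* assumes Owl; Dense.Ndarray.S is the float32 ndarray module *)
let () =
  let open Owl in
  let e2 = Dense.Ndarray.S.unit_basis 5 2 in
  Dense.Ndarray.S.print e2   (* 0 0 1 0 0 *)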

Obtain basic properties
val num_dims : arr -> int
val nth_dim : arr -> int -> int
val nnz : arr -> int
val density : arr -> float
val size_in_bytes : arr -> int
val same_shape : arr -> arr -> bool
val same_data : arr -> arr -> bool
val ind : arr -> int -> int array
val i1d : arr -> int array -> int
Manipulate a N-dimensional array
val get_index : arr -> int array array -> elt array
val set_index : arr -> int array array -> elt array -> unit
val get_fancy : Owl_types.index list -> arr -> arr
val set_fancy : Owl_types.index list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val sub_ndarray : int array -> arr -> arr array
val slice_left : arr -> int array -> arr
val fill : arr -> elt -> unit
val resize : ?head:bool -> arr -> int array -> arr
val flip : ?axis:int -> arr -> arr
val rotate : arr -> int -> arr
val swap : int -> int -> arr -> arr
val concat_vertical : arr -> arr -> arr
val concat_horizontal : arr -> arr -> arr
val concat_vh : arr array array -> arr
val split_vh : (int * int) array array -> arr -> arr array array
val dropout : ?rate:float -> arr -> arr
val top : arr -> int -> int array array
val bottom : arr -> int -> int array array
val sort : arr -> arr
val sort1 : ?axis:int -> arr -> arr
val argsort : - arr -> - (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) - Stdlib.Bigarray.Genarray.t
val mmap : Unix.file_descr -> ?pos:int64 -> bool -> int array -> arr
Iterate array elements
val iter2i : (int -> elt -> elt -> unit) -> arr -> arr -> unit
val iter2 : (elt -> elt -> unit) -> arr -> arr -> unit
val map2i : (int -> elt -> elt -> elt) -> arr -> arr -> arr
val map2 : (elt -> elt -> elt) -> arr -> arr -> arr
val iteri_nd : (int array -> elt -> unit) -> arr -> unit
val mapi_nd : (int array -> elt -> elt) -> arr -> arr
val foldi_nd : - ?axis:int -> - (int array -> elt -> elt -> elt) -> - elt -> - arr -> - arr
val scani_nd : ?axis:int -> (int array -> elt -> elt -> elt) -> arr -> arr
val filteri_nd : (int array -> elt -> bool) -> arr -> int array array
val iter2i_nd : (int array -> elt -> elt -> unit) -> arr -> arr -> unit
val map2i_nd : (int array -> elt -> elt -> elt) -> arr -> arr -> arr
val iteri_slice : ?axis:int -> (int -> arr -> unit) -> arr -> unit
val iter_slice : ?axis:int -> (arr -> unit) -> arr -> unit
val mapi_slice : ?axis:int -> (int -> arr -> 'c) -> arr -> 'c array
val map_slice : ?axis:int -> (arr -> 'c) -> arr -> 'c array
val filteri_slice : ?axis:int -> (int -> arr -> bool) -> arr -> arr array
val filter_slice : ?axis:int -> (arr -> bool) -> arr -> arr array
val foldi_slice : ?axis:int -> (int -> 'c -> arr -> 'c) -> 'c -> arr -> 'c
val fold_slice : ?axis:int -> ('c -> arr -> 'c) -> 'c -> arr -> 'c
Examine array elements or compare two arrays
Input/Output functions
val to_array : arr -> elt array
val save : out:string -> arr -> unit
val load : string -> arr
val save_npy : out:string -> arr -> unit
val load_npy : string -> arr
Unary mathematical operations
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod' : arr -> elt
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean' : arr -> elt
val median' : arr -> elt
val median : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var' : arr -> elt
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std' : arr -> elt
val sem : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sem' : arr -> elt
val minmax : ?axis:int -> ?keep_dims:bool -> arr -> arr * arr
val minmax' : arr -> elt * elt
val min_i : arr -> elt * int array
val max_i : arr -> elt * int array
val minmax_i : arr -> (elt * int array) * (elt * int array)
val abs2 : arr -> arr
val conj : arr -> arr
val reci : arr -> arr
val reci_tol : ?tol:elt -> arr -> arr
val cbrt : arr -> arr
val exp2 : arr -> arr
val exp10 : arr -> arr
val expm1 : arr -> arr
val log1p : arr -> arr
val trunc : arr -> arr
val fix : arr -> arr
val modf : arr -> arr * arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm_sqr : ?axis:int -> ?keep_dims:bool -> arr -> arr
val vecnorm : ?axis:int -> ?p:float -> ?keep_dims:bool -> arr -> arr
val vecnorm' : ?p:float -> arr -> elt
val cumsum : ?axis:int -> arr -> arr
val cumprod : ?axis:int -> arr -> arr
val cummin : ?axis:int -> arr -> arr
val cummax : ?axis:int -> arr -> arr
val diff : ?axis:int -> ?n:int -> arr -> arr
val lgamma : arr -> arr
Binary mathematical operations
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val ssqr' : arr -> elt -> elt
val ssqr_diff' : arr -> arr -> elt
Tensor Calculus
val contract1 : (int * int) array -> arr -> arr
val contract2 : (int * int) array -> arr -> arr -> arr
Experimental functions
val slide : ?axis:int -> ?ofs:int -> ?step:int -> window:int -> arr -> arr
Functions of in-place modification
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:float -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val sort_ : arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_fancy_ : out:arr -> Owl_types.index list -> arr -> unit
val set_fancy_ : out:arr -> Owl_types.index list -> arr -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : - ?transa:bool -> - ?transb:bool -> - ?alpha:elt -> - ?beta:elt -> - c:arr -> - arr -> - arr -> - unit
val conj_ : ?out:arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit

Matrix functions

val col : arr -> int -> arr
val cols : arr -> int array -> arr
val to_arrays : arr -> elt array array
val draw_rows : ?replacement:bool -> arr -> int -> arr * int array
val draw_cols : ?replacement:bool -> arr -> int -> arr * int array
val draw_rows2 : ?replacement:bool -> arr -> arr -> int -> arr * arr * int array
val draw_cols2 : ?replacement:bool -> arr -> arr -> int -> arr * arr * int array
include Owl_dense_ndarray_intf.Real with type elt := elt and type arr := arr
include Owl_base_dense_ndarray_intf.Real with type elt := elt with type arr := arr
val sum_slices : ?axis:int -> arr -> arr
val signum : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val atan2 : arr -> arr -> arr
val approx_equal : ?eps:float -> arr -> arr -> bool
val approx_equal_scalar : ?eps:float -> arr -> float -> bool
val approx_elt_equal : ?eps:float -> arr -> arr -> arr
val approx_elt_equal_scalar : ?eps:float -> arr -> float -> arr
val dot : arr -> arr -> arr
val trace : arr -> elt
Helper functions
val float_to_elt : float -> elt
val elt_to_float : elt -> float
Real operations
val i0 : arr -> arr
val i0e : arr -> arr
val i1 : arr -> arr
val i1e : arr -> arr
val iv : v:arr -> arr -> arr
val scalar_iv : v:elt -> arr -> arr
val iv_scalar : v:arr -> elt -> arr
val j0 : arr -> arr
val j1 : arr -> arr
val jv : v:arr -> arr -> arr
val scalar_jv : v:elt -> arr -> arr
val jv_scalar : v:arr -> elt -> arr
val erf : arr -> arr
val erfc : arr -> arr
val logistic : arr -> arr
val elu : ?alpha:elt -> arr -> arr
val leaky_relu : ?alpha:elt -> arr -> arr
val softplus : arr -> arr
val softsign : arr -> arr
val softmax : ?axis:int -> arr -> arr
val sigmoid : arr -> arr
val log_sum_exp' : arr -> float
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val fmod_scalar : arr -> elt -> arr
val scalar_fmod : elt -> arr -> arr
val cross_entropy' : arr -> arr -> float
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
val poisson : mu:elt -> int array -> arr
val poisson_ : mu:elt -> out:arr -> unit
include Owl_dense_ndarray_intf.NN with type arr := arr
include Owl_base_dense_ndarray_intf.NN with type arr := arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val max_pool2d_argmax : ?padding:Owl_types.padding -> arr -> int array -> int array -> arr * (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
val conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
include module type of struct include Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (-$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (*$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (/$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($+) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($-) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($*) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($/) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (!=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<>) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (>) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (>=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
include sig ... end
val (=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (!=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<>$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (>$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (>=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (!=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<>.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (>.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (>=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=~) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (=~$) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (=~.) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=~.$) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (%) : (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val (%$) : (float, 'a) Owl_dense_ndarray_generic.t -> float -> (float, 'a) Owl_dense_ndarray_generic.t
val (**) : (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val ($**) : float -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val (**$) : (float, 'a) Owl_dense_ndarray_generic.t -> float -> (float, 'a) Owl_dense_ndarray_generic.t
val (+=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (-=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (*=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (/=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (+$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (-$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (*$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (/$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (.!{;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.!{;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (.${}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.${;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list array -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.${}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (.${;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list array -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
include sig ... end
val (.%{}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int -> 'a
val (.%{;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int array -> 'a -> unit
val mpow : Owl_linalg_s.mat -> float -> Owl_linalg_s.mat
\ No newline at end of file
diff --git a/owl/Owl_dense_ndarray/Z/index.html b/owl/Owl_dense_ndarray/Z/index.html
deleted file mode 100644
index 1c42942cf..000000000
--- a/owl/Owl_dense_ndarray/Z/index.html
+++ /dev/null
@@ -1,582 +0,0 @@
Z (owl.Owl_dense_ndarray.Z)

Module Owl_dense_ndarray.Z

include module type of struct include Owl_dense_ndarray_z end
type elt = Stdlib.Complex.t
type arr = (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
type cast_arr = (float, Stdlib.Bigarray.float64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
include Owl_dense_ndarray_intf.Common with type elt := elt and type arr := arr
include Owl_base_dense_ndarray_intf.Common with type elt := elt with type arr := arr
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:float -> int array -> arr
val shape : arr -> int array
val numel : arr -> int
val strides : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val slice_size : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val flatten : arr -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val squeeze : ?axis:int array -> arr -> arr
val expand : ?hi:bool -> arr -> int -> arr
val split : ?axis:int -> int array -> arr -> arr array
val draw : ?axis:int -> arr -> int -> arr * int array
val pad : ?v:elt -> int list list -> arr -> arr
val one_hot : int -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
Iterate array elements
val iteri : (int -> elt -> unit) -> arr -> unit
val iter : (elt -> unit) -> arr -> unit
val mapi : (int -> elt -> elt) -> arr -> arr
val map : (elt -> elt) -> arr -> arr
val filteri : (int -> elt -> bool) -> arr -> int array
val filter : (elt -> bool) -> arr -> int array
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
Examination & Comparison
val exists : (elt -> bool) -> arr -> bool
val not_exists : (elt -> bool) -> arr -> bool
val for_all : (elt -> bool) -> arr -> bool
val is_zero : arr -> bool
val is_positive : arr -> bool
val is_negative : arr -> bool
val is_nonpositive : arr -> bool
val is_nonnegative : arr -> bool
val is_normal : arr -> bool
val not_nan : arr -> bool
val not_inf : arr -> bool
val equal : arr -> arr -> bool
val not_equal : arr -> arr -> bool
val greater : arr -> arr -> bool
val less : arr -> arr -> bool
val greater_equal : arr -> arr -> bool
val less_equal : arr -> arr -> bool
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val equal_scalar : arr -> elt -> bool
val not_equal_scalar : arr -> elt -> bool
val less_scalar : arr -> elt -> bool
val greater_scalar : arr -> elt -> bool
val less_equal_scalar : arr -> elt -> bool
val greater_equal_scalar : arr -> elt -> bool
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val diag : ?k:int -> arr -> arr
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
Create N-dimensional array
val linspace : elt -> elt -> int -> arr

linspace k 0. 9. 10 ...

val logspace : ?base:float -> elt -> elt -> int -> arr

logspace k 0. 9. 10 ...

val unit_basis : int -> int -> arr

unit_basis n i returns a unit basis vector of length n with the ith element set to 1 (the kind argument k of the generic version is already fixed in this module).
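For illustration, a minimal usage sketch (not part of the generated signature listing), assuming this module is referred to as Owl_dense_ndarray.Z and that complex element values come from Stdlib.Complex:

module Z = Owl_dense_ndarray.Z

(* ten evenly spaced complex values from 0+0i to 9+0i *)
let xs = Z.linspace Complex.zero { Complex.re = 9.; im = 0. } 10

(* unit basis vector of length 5 with the element at index 2 set to 1 *)
let e2 = Z.unit_basis 5 2

let () = Z.print xs; Z.print e2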

Obtain basic properties
val num_dims : arr -> int
val nth_dim : arr -> int -> int
val nnz : arr -> int
val density : arr -> float
val size_in_bytes : arr -> int
val same_shape : arr -> arr -> bool
val same_data : arr -> arr -> bool
val ind : arr -> int -> int array
val i1d : arr -> int array -> int
Manipulate a N-dimensional array
val get_index : arr -> int array array -> elt array
val set_index : arr -> int array array -> elt array -> unit
val get_fancy : Owl_types.index list -> arr -> arr
val set_fancy : Owl_types.index list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val sub_ndarray : int array -> arr -> arr array
val slice_left : arr -> int array -> arr
val fill : arr -> elt -> unit
val resize : ?head:bool -> arr -> int array -> arr
val flip : ?axis:int -> arr -> arr
val rotate : arr -> int -> arr
val swap : int -> int -> arr -> arr
val concat_vertical : arr -> arr -> arr
val concat_horizontal : arr -> arr -> arr
val concat_vh : arr array array -> arr
val split_vh : (int * int) array array -> arr -> arr array array
val dropout : ?rate:float -> arr -> arr
val top : arr -> int -> int array array
val bottom : arr -> int -> int array array
val sort : arr -> arr
val sort1 : ?axis:int -> arr -> arr
val argsort : arr -> (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
val mmap : Unix.file_descr -> ?pos:int64 -> bool -> int array -> arr
Iterate array elements
val iter2i : (int -> elt -> elt -> unit) -> arr -> arr -> unit
val iter2 : (elt -> elt -> unit) -> arr -> arr -> unit
val map2i : (int -> elt -> elt -> elt) -> arr -> arr -> arr
val map2 : (elt -> elt -> elt) -> arr -> arr -> arr
val iteri_nd : (int array -> elt -> unit) -> arr -> unit
val mapi_nd : (int array -> elt -> elt) -> arr -> arr
val foldi_nd : ?axis:int -> (int array -> elt -> elt -> elt) -> elt -> arr -> arr
val scani_nd : ?axis:int -> (int array -> elt -> elt -> elt) -> arr -> arr
val filteri_nd : (int array -> elt -> bool) -> arr -> int array array
val iter2i_nd : (int array -> elt -> elt -> unit) -> arr -> arr -> unit
val map2i_nd : (int array -> elt -> elt -> elt) -> arr -> arr -> arr
val iteri_slice : ?axis:int -> (int -> arr -> unit) -> arr -> unit
val iter_slice : ?axis:int -> (arr -> unit) -> arr -> unit
val mapi_slice : ?axis:int -> (int -> arr -> 'c) -> arr -> 'c array
val map_slice : ?axis:int -> (arr -> 'c) -> arr -> 'c array
val filteri_slice : ?axis:int -> (int -> arr -> bool) -> arr -> arr array
val filter_slice : ?axis:int -> (arr -> bool) -> arr -> arr array
val foldi_slice : ?axis:int -> (int -> 'c -> arr -> 'c) -> 'c -> arr -> 'c
val fold_slice : ?axis:int -> ('c -> arr -> 'c) -> 'c -> arr -> 'c
Examine array elements or compare two arrays
val approx_equal : ?eps:float -> arr -> arr -> bool
val approx_equal_scalar : ?eps:float -> arr -> elt -> bool
val approx_elt_equal : ?eps:float -> arr -> arr -> arr
val approx_elt_equal_scalar : ?eps:float -> arr -> elt -> arr
Input/Output functions
val to_array : arr -> elt array
val save : out:string -> arr -> unit
val load : string -> arr
val save_npy : out:string -> arr -> unit
val load_npy : string -> arr
Unary mathematical operations
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod' : arr -> elt
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean' : arr -> elt
val median' : arr -> elt
val median : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var' : arr -> elt
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std' : arr -> elt
val sem : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sem' : arr -> elt
val minmax : ?axis:int -> ?keep_dims:bool -> arr -> arr * arr
val minmax' : arr -> elt * elt
val min_i : arr -> elt * int array
val max_i : arr -> elt * int array
val minmax_i : arr -> (elt * int array) * (elt * int array)
val abs2 : arr -> arr
val conj : arr -> arr
val reci : arr -> arr
val reci_tol : ?tol:elt -> arr -> arr
val cbrt : arr -> arr
val exp2 : arr -> arr
val exp10 : arr -> arr
val expm1 : arr -> arr
val log1p : arr -> arr
val trunc : arr -> arr
val fix : arr -> arr
val modf : arr -> arr * arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm' : arr -> elt
val l2norm_sqr : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm_sqr' : arr -> elt
val vecnorm : ?axis:int -> ?p:float -> ?keep_dims:bool -> arr -> arr
val vecnorm' : ?p:float -> arr -> elt
val cumsum : ?axis:int -> arr -> arr
val cumprod : ?axis:int -> arr -> arr
val cummin : ?axis:int -> arr -> arr
val cummax : ?axis:int -> arr -> arr
val diff : ?axis:int -> ?n:int -> arr -> arr
val lgamma : arr -> arr
Binary mathematical operations
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val ssqr' : arr -> elt -> elt
val ssqr_diff' : arr -> arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
Tensor Calculus
val contract1 : (int * int) array -> arr -> arr
val contract2 : (int * int) array -> arr -> arr -> arr
Experimental functions
val sum_slices : ?axis:int -> arr -> arr
val slide : ?axis:int -> ?ofs:int -> ?step:int -> window:int -> arr -> arr
Functions of in-place modification
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:float -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val sort_ : arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_fancy_ : out:arr -> Owl_types.index list -> arr -> unit
val set_fancy_ : out:arr -> Owl_types.index list -> arr -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : - ?transa:bool -> - ?transb:bool -> - ?alpha:elt -> - ?beta:elt -> - c:arr -> - arr -> - arr -> - unit
val conj_ : ?out:arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit

Matrix functions

val col : arr -> int -> arr
val cols : arr -> int array -> arr
val dot : arr -> arr -> arr
val trace : arr -> elt
val to_arrays : arr -> elt array array
val draw_rows : ?replacement:bool -> arr -> int -> arr * int array
val draw_cols : ?replacement:bool -> arr -> int -> arr * int array
val draw_rows2 : ?replacement:bool -> arr -> arr -> int -> arr * arr * int array
val draw_cols2 : ?replacement:bool -> arr -> arr -> int -> arr * arr * int array
include Owl_dense_ndarray_intf.NN with type arr := arr
include Owl_base_dense_ndarray_intf.NN with type arr := arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val max_pool2d_argmax : ?padding:Owl_types.padding -> arr -> int array -> int array -> arr * (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
val conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
include Owl_dense_ndarray_intf.Complex with type elt := elt and type arr := arr and type cast_arr := cast_arr
Complex operations
val complex : cast_arr -> cast_arr -> arr

complex re im constructs a complex ndarray/matrix from re and im. re and im contain the real and imaginary part of x respectively.

Note that both re and im can be complex but must have the same type. The real part of re will be the real part of x and the imaginary part of im will be the imaginary part of x.

val polar : cast_arr -> cast_arr -> arr

polar rho theta constructs a complex ndarray/matrix from polar coordinates rho and theta. rho contains the magnitudes and theta contains phase angles. Note that the behaviour is undefined if rho has negative elements or theta has infinity elements.
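A brief usage sketch of these two constructors, assuming the real-valued cast_arr arguments are built with the float64 module Owl_dense_ndarray.D (which matches the cast_arr type above):

module D = Owl_dense_ndarray.D
module Z = Owl_dense_ndarray.Z

let re_part = D.ones [| 2; 2 |]      (* real parts *)
let im_part = D.zeros [| 2; 2 |]     (* imaginary parts *)
let x = Z.complex re_part im_part    (* x = re_part + i * im_part, element-wise *)

let rho = D.create [| 3 |] 2.        (* magnitudes *)
let theta = D.zeros [| 3 |]          (* phase angles *)
let y = Z.polar rho theta            (* each element of y has magnitude rho and phase theta *)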

val re : arr -> cast_arr
val im : arr -> cast_arr
val sum' : arr -> elt
include module type of struct include Operator end
include sig ... end
val (+$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (-$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (*$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (/$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($+) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($-) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($*) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val ($/) : 'a -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (!=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<>) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (>) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (>=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (<=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
include sig ... end
val (=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (!=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<>$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (>$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (<=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (>=$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (!=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<>.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (>.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (<=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (>=.$) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=~) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> bool
val (=~$) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> bool
val (=~.) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t
val (=~.$) : ?eps:float -> ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> ('a, 'b) Owl_dense_ndarray_generic.t
val (%) : (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val (%$) : (float, 'a) Owl_dense_ndarray_generic.t -> float -> (float, 'a) Owl_dense_ndarray_generic.t
val (**) : (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val ($**) : float -> (float, 'a) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t
val (**$) : (float, 'a) Owl_dense_ndarray_generic.t -> float -> (float, 'a) Owl_dense_ndarray_generic.t
val (+=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (-=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (*=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (/=) : ('a, 'b) Owl_dense_ndarray_generic.t -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (+$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (-$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (*$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (/$=) : ('a, 'b) Owl_dense_ndarray_generic.t -> 'a -> unit
val (.!{;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.!{;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> Owl_types.index array -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (.${}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.${;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list array -> ('a, 'b) Owl_dense_ndarray_generic.t
val (.${}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
val (.${;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int list array -> ('a, 'b) Owl_dense_ndarray_generic.t -> unit
include sig ... end
val (.%{}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int -> 'a
val (.%{;..}) : ('a, 'b) Owl_dense_ndarray_generic.t -> int array -> 'a
val (.%{}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int -> 'a -> unit
val (.%{;..}<-) : ('a, 'b) Owl_dense_ndarray_generic.t -> int array -> 'a -> unit
val mpow : Owl_linalg_z.mat -> float -> Owl_linalg_z.mat
\ No newline at end of file
diff --git a/owl/Owl_dense_ndarray_a/.dummy b/owl/Owl_dense_ndarray_a/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_dense_ndarray_c/.dummy b/owl/Owl_dense_ndarray_c/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_dense_ndarray_d/.dummy b/owl/Owl_dense_ndarray_d/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_dense_ndarray_generic/.dummy b/owl/Owl_dense_ndarray_generic/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_dense_ndarray_intf/.dummy b/owl/Owl_dense_ndarray_intf/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_dense_ndarray_intf/module-type-Common/index.html b/owl/Owl_dense_ndarray_intf/module-type-Common/index.html
deleted file mode 100644
index 15cd0c99a..000000000
--- a/owl/Owl_dense_ndarray_intf/module-type-Common/index.html
+++ /dev/null
@@ -1,34 +0,0 @@
Common (owl.Owl_dense_ndarray_intf.Common)

Module type Owl_dense_ndarray_intf.Common

include Owl_base_dense_ndarray_intf.Common
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:float -> int array -> arr
val shape : arr -> int array
val numel : arr -> int
val strides : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val slice_size : arr -> int array

Refer to :doc:`owl_dense_ndarray_generic`

val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val flatten : arr -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val squeeze : ?axis:int array -> arr -> arr
val expand : ?hi:bool -> arr -> int -> arr
val split : ?axis:int -> int array -> arr -> arr array
val draw : ?axis:int -> arr -> int -> arr * int array
val pad : ?v:elt -> int list list -> arr -> arr
val one_hot : int -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
Iterate array elements
val iteri : (int -> elt -> unit) -> arr -> unit
val iter : (elt -> unit) -> arr -> unit
val mapi : (int -> elt -> elt) -> arr -> arr
val map : (elt -> elt) -> arr -> arr
val filteri : (int -> elt -> bool) -> arr -> int array
val filter : (elt -> bool) -> arr -> int array
val foldi : ?axis:int -> (int -> elt -> elt -> elt) -> elt -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scani : ?axis:int -> (int -> elt -> elt -> elt) -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
Examination & Comparison
val exists : (elt -> bool) -> arr -> bool
val not_exists : (elt -> bool) -> arr -> bool
val for_all : (elt -> bool) -> arr -> bool
val is_zero : arr -> bool
val is_positive : arr -> bool
val is_negative : arr -> bool
val is_nonpositive : arr -> bool
val is_nonnegative : arr -> bool
val is_normal : arr -> bool
val not_nan : arr -> bool
val not_inf : arr -> bool
val equal : arr -> arr -> bool
val not_equal : arr -> arr -> bool
val greater : arr -> arr -> bool
val less : arr -> arr -> bool
val greater_equal : arr -> arr -> bool
val less_equal : arr -> arr -> bool
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val equal_scalar : arr -> elt -> bool
val not_equal_scalar : arr -> elt -> bool
val less_scalar : arr -> elt -> bool
val greater_scalar : arr -> elt -> bool
val less_equal_scalar : arr -> elt -> bool
val greater_equal_scalar : arr -> elt -> bool
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val diag : ?k:int -> arr -> arr
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
Create N-dimensional array
val linspace : elt -> elt -> int -> arr

linspace k 0. 9. 10 ...

val logspace : ?base:float -> elt -> elt -> int -> arr

logspace k 0. 9. 10 ...

val unit_basis : int -> int -> arr

unit_basis n i returns a unit basis vector of length n with the ith element set to 1 (the kind argument k of the generic version is not part of this interface).
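Since Common is a module type, these functions are called through a module that includes it; a short sketch assuming the float64 ndarray module Owl_dense_ndarray.D is such an instance:

module D = Owl_dense_ndarray.D

let xs = D.linspace 0. 9. 10            (* 10 evenly spaced values from 0. to 9. *)
let ys = D.logspace ~base:10. 0. 3. 4   (* 4 values from 10^0 to 10^3 *)
let e2 = D.unit_basis 5 2               (* length-5 basis vector with index 2 set to 1. *)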

Obtain basic properties
val num_dims : arr -> int
val nth_dim : arr -> int -> int
val nnz : arr -> int
val density : arr -> float
val size_in_bytes : arr -> int
val same_shape : arr -> arr -> bool
val same_data : arr -> arr -> bool
val ind : arr -> int -> int array
val i1d : arr -> int array -> int
Manipulate a N-dimensional array
val get_index : arr -> int array array -> elt array
val set_index : arr -> int array array -> elt array -> unit
val get_fancy : Owl_types.index list -> arr -> arr
val set_fancy : Owl_types.index list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val sub_ndarray : int array -> arr -> arr array
val slice_left : arr -> int array -> arr
val fill : arr -> elt -> unit
val resize : ?head:bool -> arr -> int array -> arr
val flip : ?axis:int -> arr -> arr
val rotate : arr -> int -> arr
val swap : int -> int -> arr -> arr
val concat_vertical : arr -> arr -> arr
val concat_horizontal : arr -> arr -> arr
val concat_vh : arr array array -> arr
val split_vh : (int * int) array array -> arr -> arr array array
val dropout : ?rate:float -> arr -> arr
val top : arr -> int -> int array array
val bottom : arr -> int -> int array array
val sort : arr -> arr
val sort1 : ?axis:int -> arr -> arr
val argsort : arr -> (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
val mmap : Unix.file_descr -> ?pos:int64 -> bool -> int array -> arr
Iterate array elements
val iter2i : (int -> elt -> elt -> unit) -> arr -> arr -> unit
val iter2 : (elt -> elt -> unit) -> arr -> arr -> unit
val map2i : (int -> elt -> elt -> elt) -> arr -> arr -> arr
val map2 : (elt -> elt -> elt) -> arr -> arr -> arr
val iteri_nd : (int array -> elt -> unit) -> arr -> unit
val mapi_nd : (int array -> elt -> elt) -> arr -> arr
val foldi_nd : ?axis:int -> (int array -> elt -> elt -> elt) -> elt -> arr -> arr
val scani_nd : ?axis:int -> (int array -> elt -> elt -> elt) -> arr -> arr
val filteri_nd : (int array -> elt -> bool) -> arr -> int array array
val iter2i_nd : (int array -> elt -> elt -> unit) -> arr -> arr -> unit
val map2i_nd : (int array -> elt -> elt -> elt) -> arr -> arr -> arr
val iteri_slice : ?axis:int -> (int -> arr -> unit) -> arr -> unit
val iter_slice : ?axis:int -> (arr -> unit) -> arr -> unit
val mapi_slice : ?axis:int -> (int -> arr -> 'c) -> arr -> 'c array
val map_slice : ?axis:int -> (arr -> 'c) -> arr -> 'c array
val filteri_slice : ?axis:int -> (int -> arr -> bool) -> arr -> arr array
val filter_slice : ?axis:int -> (arr -> bool) -> arr -> arr array
val foldi_slice : ?axis:int -> (int -> 'c -> arr -> 'c) -> 'c -> arr -> 'c
val fold_slice : ?axis:int -> ('c -> arr -> 'c) -> 'c -> arr -> 'c
Examine array elements or compare two arrays
val approx_equal : ?eps:float -> arr -> arr -> bool
val approx_equal_scalar : ?eps:float -> arr -> elt -> bool
val approx_elt_equal : ?eps:float -> arr -> arr -> arr
val approx_elt_equal_scalar : ?eps:float -> arr -> elt -> arr
Input/Output functions
val to_array : arr -> elt array
val save : out:string -> arr -> unit
val load : string -> arr
val save_npy : out:string -> arr -> unit
val load_npy : string -> arr
Unary mathematical operations
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod' : arr -> elt
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean' : arr -> elt
val median' : arr -> elt
val median : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var' : arr -> elt
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std' : arr -> elt
val sem : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sem' : arr -> elt
val minmax : ?axis:int -> ?keep_dims:bool -> arr -> arr * arr
val minmax' : arr -> elt * elt
val min_i : arr -> elt * int array
val max_i : arr -> elt * int array
val minmax_i : arr -> (elt * int array) * (elt * int array)
val abs2 : arr -> arr
val conj : arr -> arr
val reci : arr -> arr
val reci_tol : ?tol:elt -> arr -> arr
val cbrt : arr -> arr
val exp2 : arr -> arr
val exp10 : arr -> arr
val expm1 : arr -> arr
val log1p : arr -> arr
val trunc : arr -> arr
val fix : arr -> arr
val modf : arr -> arr * arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm' : arr -> elt
val l2norm_sqr : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm_sqr' : arr -> elt
val vecnorm : ?axis:int -> ?p:float -> ?keep_dims:bool -> arr -> arr
val vecnorm' : ?p:float -> arr -> elt
val cumsum : ?axis:int -> arr -> arr
val cumprod : ?axis:int -> arr -> arr
val cummin : ?axis:int -> arr -> arr
val cummax : ?axis:int -> arr -> arr
val diff : ?axis:int -> ?n:int -> arr -> arr
val lgamma : arr -> arr
Binary mathematical operations
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val ssqr' : arr -> elt -> elt
val ssqr_diff' : arr -> arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
Tensor Calculus
val contract1 : (int * int) array -> arr -> arr
val contract2 : (int * int) array -> arr -> arr -> arr
Experimental functions
val sum_slices : ?axis:int -> arr -> arr
val slide : ?axis:int -> ?ofs:int -> ?step:int -> window:int -> arr -> arr
Functions of in-place modification
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:float -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val sort_ : arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_fancy_ : out:arr -> Owl_types.index list -> arr -> unit
val set_fancy_ : out:arr -> Owl_types.index list -> arr -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit

Matrix functions

val col : arr -> int -> arr
val cols : arr -> int array -> arr
val dot : arr -> arr -> arr
val trace : arr -> elt
val to_arrays : arr -> elt array array
val draw_rows : ?replacement:bool -> arr -> int -> arr * int array
val draw_cols : ?replacement:bool -> arr -> int -> arr * int array
val draw_rows2 : ?replacement:bool -> arr -> arr -> int -> arr * arr * int array
val draw_cols2 : ?replacement:bool -> arr -> arr -> int -> arr * arr * int array

Module type Owl_dense_ndarray_intf.Complex

type elt
type arr
type cast_arr
Complex operations
val complex : cast_arr -> cast_arr -> arr

complex re im constructs a complex ndarray/matrix x from re and im, which contain the real and imaginary parts of x respectively.

Note that both re and im can themselves be complex but must have the same type. The real part of re becomes the real part of x, and the imaginary part of im becomes the imaginary part of x.
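A minimal sketch, assuming the complex64 instantiation Owl.Dense.Ndarray.Z of this interface, where cast_arr is the float64 ndarray type Owl.Dense.Ndarray.D.arr:

  let () =
    let open Owl in
    let re = Dense.Ndarray.D.ones [| 3 |] in          (* real parts:      1. 1. 1. *)
    let im = Dense.Ndarray.D.sequential [| 3 |] in    (* imaginary parts: 0. 1. 2. *)
    let z  = Dense.Ndarray.Z.complex re im in         (* 1+0i, 1+1i, 1+2i *)
    Dense.Ndarray.Z.print z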

val polar : cast_arr -> cast_arr -> arr

polar rho theta constructs a complex ndarray/matrix from polar coordinates rho and theta: rho contains the magnitudes and theta contains the phase angles. Note that the behaviour is undefined if rho has negative elements or theta has infinite elements.
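And the polar constructor under the same assumed Owl.Dense.Ndarray.Z instantiation, using magnitude 2 and phases from 0 to pi:

  let () =
    let open Owl in
    let rho   = Dense.Ndarray.D.create [| 3 |] 2. in
    let theta = Dense.Ndarray.D.linspace 0. Float.pi 3 in
    let z = Dense.Ndarray.Z.polar rho theta in
    Dense.Ndarray.Z.print z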

val re : arr -> cast_arr
val im : arr -> cast_arr
val sum' : arr -> elt

Module type Owl_dense_ndarray_intf.Distribution

type arr
Stats & distribution functions
val uniform_rvs : a:arr -> b:arr -> n:int -> arr
val uniform_pdf : a:arr -> b:arr -> arr -> arr
val uniform_logpdf : a:arr -> b:arr -> arr -> arr
val uniform_cdf : a:arr -> b:arr -> arr -> arr
val uniform_logcdf : a:arr -> b:arr -> arr -> arr
val uniform_ppf : a:arr -> b:arr -> arr -> arr
val uniform_sf : a:arr -> b:arr -> arr -> arr
val uniform_logsf : a:arr -> b:arr -> arr -> arr
val uniform_isf : a:arr -> b:arr -> arr -> arr
val gaussian_rvs : mu:arr -> sigma:arr -> n:int -> arr
val gaussian_pdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logpdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_cdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logcdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_ppf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_sf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logsf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_isf : mu:arr -> sigma:arr -> arr -> arr
val exponential_rvs : lambda:arr -> n:int -> arr
val exponential_pdf : lambda:arr -> arr -> arr
val exponential_logpdf : lambda:arr -> arr -> arr
val exponential_cdf : lambda:arr -> arr -> arr
val exponential_logcdf : lambda:arr -> arr -> arr
val exponential_ppf : lambda:arr -> arr -> arr
val exponential_sf : lambda:arr -> arr -> arr
val exponential_logsf : lambda:arr -> arr -> arr
val exponential_isf : lambda:arr -> arr -> arr
val gamma_rvs : shape:arr -> scale:arr -> n:int -> arr
val gamma_pdf : shape:arr -> scale:arr -> arr -> arr
val gamma_logpdf : shape:arr -> scale:arr -> arr -> arr
val gamma_cdf : shape:arr -> scale:arr -> arr -> arr
val gamma_logcdf : shape:arr -> scale:arr -> arr -> arr
val gamma_ppf : shape:arr -> scale:arr -> arr -> arr
val gamma_sf : shape:arr -> scale:arr -> arr -> arr
val gamma_logsf : shape:arr -> scale:arr -> arr -> arr
val gamma_isf : shape:arr -> scale:arr -> arr -> arr
val beta_rvs : a:arr -> b:arr -> n:int -> arr
val beta_pdf : a:arr -> b:arr -> arr -> arr
val beta_logpdf : a:arr -> b:arr -> arr -> arr
val beta_cdf : a:arr -> b:arr -> arr -> arr
val beta_logcdf : a:arr -> b:arr -> arr -> arr
val beta_ppf : a:arr -> b:arr -> arr -> arr
val beta_sf : a:arr -> b:arr -> arr -> arr
val beta_logsf : a:arr -> b:arr -> arr -> arr
val beta_isf : a:arr -> b:arr -> arr -> arr
val chi2_rvs : df:arr -> n:int -> arr
val chi2_pdf : df:arr -> arr -> arr
val chi2_logpdf : df:arr -> arr -> arr
val chi2_cdf : df:arr -> arr -> arr
val chi2_logcdf : df:arr -> arr -> arr
val chi2_ppf : df:arr -> arr -> arr
val chi2_sf : df:arr -> arr -> arr
val chi2_logsf : df:arr -> arr -> arr
val chi2_isf : df:arr -> arr -> arr
val f_rvs : dfnum:arr -> dfden:arr -> n:int -> arr
val f_pdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logpdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_cdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logcdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_ppf : dfnum:arr -> dfden:arr -> arr -> arr
val f_sf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logsf : dfnum:arr -> dfden:arr -> arr -> arr
val f_isf : dfnum:arr -> dfden:arr -> arr -> arr
val cauchy_rvs : loc:arr -> scale:arr -> n:int -> arr
val cauchy_pdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logpdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_cdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logcdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_ppf : loc:arr -> scale:arr -> arr -> arr
val cauchy_sf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logsf : loc:arr -> scale:arr -> arr -> arr
val cauchy_isf : loc:arr -> scale:arr -> arr -> arr
val lomax_rvs : shape:arr -> scale:arr -> n:int -> arr
val lomax_pdf : shape:arr -> scale:arr -> arr -> arr
val lomax_logpdf : shape:arr -> scale:arr -> arr -> arr
val lomax_cdf : shape:arr -> scale:arr -> arr -> arr
val lomax_logcdf : shape:arr -> scale:arr -> arr -> arr
val lomax_ppf : shape:arr -> scale:arr -> arr -> arr
val lomax_sf : shape:arr -> scale:arr -> arr -> arr
val lomax_logsf : shape:arr -> scale:arr -> arr -> arr
val lomax_isf : shape:arr -> scale:arr -> arr -> arr
val weibull_rvs : shape:arr -> scale:arr -> n:int -> arr
val weibull_pdf : shape:arr -> scale:arr -> arr -> arr
val weibull_logpdf : shape:arr -> scale:arr -> arr -> arr
val weibull_cdf : shape:arr -> scale:arr -> arr -> arr
val weibull_logcdf : shape:arr -> scale:arr -> arr -> arr
val weibull_ppf : shape:arr -> scale:arr -> arr -> arr
val weibull_sf : shape:arr -> scale:arr -> arr -> arr
val weibull_logsf : shape:arr -> scale:arr -> arr -> arr
val weibull_isf : shape:arr -> scale:arr -> arr -> arr
val laplace_rvs : loc:arr -> scale:arr -> n:int -> arr
val laplace_pdf : loc:arr -> scale:arr -> arr -> arr
val laplace_logpdf : loc:arr -> scale:arr -> arr -> arr
val laplace_cdf : loc:arr -> scale:arr -> arr -> arr
val laplace_logcdf : loc:arr -> scale:arr -> arr -> arr
val laplace_ppf : loc:arr -> scale:arr -> arr -> arr
val laplace_sf : loc:arr -> scale:arr -> arr -> arr
val laplace_logsf : loc:arr -> scale:arr -> arr -> arr
val laplace_isf : loc:arr -> scale:arr -> arr -> arr
val gumbel1_rvs : a:arr -> b:arr -> n:int -> arr
val gumbel1_pdf : a:arr -> b:arr -> arr -> arr
val gumbel1_logpdf : a:arr -> b:arr -> arr -> arr
val gumbel1_cdf : a:arr -> b:arr -> arr -> arr
val gumbel1_logcdf : a:arr -> b:arr -> arr -> arr
val gumbel1_ppf : a:arr -> b:arr -> arr -> arr
val gumbel1_sf : a:arr -> b:arr -> arr -> arr
val gumbel1_logsf : a:arr -> b:arr -> arr -> arr
val gumbel1_isf : a:arr -> b:arr -> arr -> arr
val gumbel2_rvs : a:arr -> b:arr -> n:int -> arr
val gumbel2_pdf : a:arr -> b:arr -> arr -> arr
val gumbel2_logpdf : a:arr -> b:arr -> arr -> arr
val gumbel2_cdf : a:arr -> b:arr -> arr -> arr
val gumbel2_logcdf : a:arr -> b:arr -> arr -> arr
val gumbel2_ppf : a:arr -> b:arr -> arr -> arr
val gumbel2_sf : a:arr -> b:arr -> arr -> arr
val gumbel2_logsf : a:arr -> b:arr -> arr -> arr
val gumbel2_isf : a:arr -> b:arr -> arr -> arr
val logistic_rvs : loc:arr -> scale:arr -> n:int -> arr
val logistic_pdf : loc:arr -> scale:arr -> arr -> arr
val logistic_logpdf : loc:arr -> scale:arr -> arr -> arr
val logistic_cdf : loc:arr -> scale:arr -> arr -> arr
val logistic_logcdf : loc:arr -> scale:arr -> arr -> arr
val logistic_ppf : loc:arr -> scale:arr -> arr -> arr
val logistic_sf : loc:arr -> scale:arr -> arr -> arr
val logistic_logsf : loc:arr -> scale:arr -> arr -> arr
val logistic_isf : loc:arr -> scale:arr -> arr -> arr
val lognormal_rvs : mu:arr -> sigma:arr -> n:int -> arr
val lognormal_pdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logpdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_cdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logcdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_ppf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_sf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logsf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_isf : mu:arr -> sigma:arr -> arr -> arr
val rayleigh_rvs : sigma:arr -> n:int -> arr
val rayleigh_pdf : sigma:arr -> arr -> arr
val rayleigh_logpdf : sigma:arr -> arr -> arr
val rayleigh_cdf : sigma:arr -> arr -> arr
val rayleigh_logcdf : sigma:arr -> arr -> arr
val rayleigh_ppf : sigma:arr -> arr -> arr
val rayleigh_sf : sigma:arr -> arr -> arr
val rayleigh_logsf : sigma:arr -> arr -> arr
val rayleigh_isf : sigma:arr -> arr -> arr

Module type Owl_dense_ndarray_intf.NN

include Owl_base_dense_ndarray_intf.NN
type arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val max_pool2d_argmax : ?padding:Owl_types.padding -> arr -> int array -> int array -> arr * (int64, Stdlib.Bigarray.int64_elt, Stdlib.Bigarray.c_layout) Stdlib.Bigarray.Genarray.t
val conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit

Module type Owl_dense_ndarray_intf.Real

include Owl_base_dense_ndarray_intf.Real
type elt
type arr
val sum_slices : ?axis:int -> arr -> arr
val signum : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val atan2 : arr -> arr -> arr
val approx_equal : ?eps:float -> arr -> arr -> bool
val approx_equal_scalar : ?eps:float -> arr -> float -> bool
val approx_elt_equal : ?eps:float -> arr -> arr -> arr
val approx_elt_equal_scalar : ?eps:float -> arr -> float -> arr
val dot : arr -> arr -> arr
val trace : arr -> elt
Helper functions
val float_to_elt : float -> elt
val elt_to_float : elt -> float
Real operations
val i0 : arr -> arr
val i0e : arr -> arr
val i1 : arr -> arr
val i1e : arr -> arr
val iv : v:arr -> arr -> arr
val scalar_iv : v:elt -> arr -> arr
val iv_scalar : v:arr -> elt -> arr
val j0 : arr -> arr
val j1 : arr -> arr
val jv : v:arr -> arr -> arr
val scalar_jv : v:elt -> arr -> arr
val jv_scalar : v:arr -> elt -> arr
val erf : arr -> arr
val erfc : arr -> arr
val logistic : arr -> arr
val elu : ?alpha:elt -> arr -> arr
val leaky_relu : ?alpha:elt -> arr -> arr
val softplus : arr -> arr
val softsign : arr -> arr
val softmax : ?axis:int -> arr -> arr
val sigmoid : arr -> arr
val log_sum_exp' : arr -> float
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val fmod_scalar : arr -> elt -> arr
val scalar_fmod : elt -> arr -> arr
val cross_entropy' : arr -> arr -> float
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
val poisson : mu:elt -> int array -> arr
val poisson_ : mu:elt -> out:arr -> unit

Module Make.Beta

type t = {
  1. a : A.arr;
  2. b : A.arr;
}

Type definition of a specific distribution

val make : a:A.arr -> b:A.arr -> t

Make a distribution of the given parameters.

val sample : t -> int -> A.arr

Sample a distribution of the given parameters.

val pdf : t -> A.arr -> A.arr

Probability density/mass function of the distribution.

val logpdf : t -> A.arr -> A.arr

Logarithm of the probability density/mass function of the distribution.

val cdf : t -> A.arr -> A.arr

Cumulative density/mass function of the distribution.

val logcdf : t -> A.arr -> A.arr

Logarithm of the cumulative density/mass function of the distribution.

val ppf : t -> A.arr -> A.arr

Percentile function of the distribution.

val sf : t -> A.arr -> A.arr

Survival function of the distribution.

val logsf : t -> A.arr -> A.arr

Logarithm of the survival function of the distribution.

val isf : t -> A.arr -> A.arr

Inverse survival function of the distribution.
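Putting the Beta interface together, a minimal sketch. The functor instantiation below is an assumption (Owl.Dense.Ndarray.D supplies the required beta_rvs/beta_pdf primitives), not something fixed by this signature.

  module M = Owl_distribution.Make (Owl.Dense.Ndarray.D)

  let () =
    let open Owl.Dense.Ndarray.D in
    let d  = M.Beta.make ~a:(create [| 1 |] 2.) ~b:(create [| 1 |] 5.) in
    let xs = M.Beta.sample d 1000 in                       (* 1000 draws from Beta(2, 5) *)
    let p  = M.Beta.pdf d (create [| 1 |] 0.3) in
    Printf.printf "sample mean = %g, pdf(0.3) = %g\n" (mean' xs) (get p [| 0 |])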


Module Make.Cauchy

type t = {
  1. loc : A.arr;
  2. scale : A.arr;
}

Type definition of a specific distribution

val make : loc:A.arr -> scale:A.arr -> t

Make a distribution of the given parameters.

val sample : t -> int -> A.arr

Sample a distribution of the given parameters.

val pdf : t -> A.arr -> A.arr

Probability density/mass function of the distribution.

val logpdf : t -> A.arr -> A.arr

Logarithm of the probability density/mass function of the distribution.

val cdf : t -> A.arr -> A.arr

Cumulative density/mass function of the distribution.

val logcdf : t -> A.arr -> A.arr

Logarithm of the cumulative density/mass function of the distribution.

val ppf : t -> A.arr -> A.arr

Percentile function of the distribution.

val sf : t -> A.arr -> A.arr

Survival function of the distribution.

val logsf : t -> A.arr -> A.arr

Logarithm of the survival function of the distribution.

val isf : t -> A.arr -> A.arr

Inverse survival function of the distribution.


Module Make.Chi2

type t = {
  1. df : A.arr;
}

Type definition of a specific distribution

val make : df:A.arr -> _sigma:'a -> t

Make a distribution of the given parameters.
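Note the extra ~_sigma argument: it is polymorphic and, as the single df field above suggests, not stored in t, so any placeholder value can be passed. A hedged sketch, under the same assumed functor instantiation as the Beta example above:

  module M = Owl_distribution.Make (Owl.Dense.Ndarray.D)   (* assumed instantiation *)

  let () =
    let open Owl.Dense.Ndarray.D in
    let chi2 = M.Chi2.make ~df:(create [| 1 |] 4.) ~_sigma:() in
    print (M.Chi2.sample chi2 5)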

val sample : t -> int -> A.arr

Sample a distribution of the given parameters.

val pdf : t -> A.arr -> A.arr

Probability density/mass function of the distribution.

val logpdf : t -> A.arr -> A.arr

Logarithm of the probability density/mass function of the distribution.

val cdf : t -> A.arr -> A.arr

Cumulative density/mass function of the distribution.

val logcdf : t -> A.arr -> A.arr

Logarithm of the cumulative density/mass function of the distribution.

val ppf : t -> A.arr -> A.arr

Percentile function of the distribution.

val sf : t -> A.arr -> A.arr

Survival function of the distribution.

val logsf : t -> A.arr -> A.arr

Logarithm of the survival function of the distribution.

val isf : t -> A.arr -> A.arr

Inverse survival function of the distribution.


Module Make.Exponential

type t = {
  1. lambda : A.arr;
}

Type definition of a specific distribution

val make : lambda:A.arr -> t

Make a distribution of the given parameters.

val sample : t -> int -> A.arr

Sample a distribution of the given parameters.

val pdf : t -> A.arr -> A.arr

Probability density/mass function of the distribution.

val logpdf : t -> A.arr -> A.arr

Logarithm of the probability density/mass function of the distribution.

val cdf : t -> A.arr -> A.arr

Cumulative density/mass function of the distribution.

val logcdf : t -> A.arr -> A.arr

Logarithm of the cumulative density/mass function of the distribution.

val ppf : t -> A.arr -> A.arr

Percentile function of the distribution.

val sf : t -> A.arr -> A.arr

Survival function of the distribution.

val logsf : t -> A.arr -> A.arr

Logarithm of the survival function of the distribution.

val isf : t -> A.arr -> A.arr

Inverse survival function of the distribution.


Module Make.F

type t = {
  1. dfnum : A.arr;
  2. dfden : A.arr;
}

Type definition of a specific distribution

val make : dfnum:A.arr -> dfden:A.arr -> t

Make a distribution of the given parameters.

val sample : t -> int -> A.arr

Sample a distribution of the given parameters.

val pdf : t -> A.arr -> A.arr

Probability density/mass function of the distribution.

val logpdf : t -> A.arr -> A.arr

Logarithm of the probability density/mass function of the distribution.

val cdf : t -> A.arr -> A.arr

Cumulative density/mass function of the distribution.

val logcdf : t -> A.arr -> A.arr

Logarithm of the cumulative density/mass function of the distribution.

val ppf : t -> A.arr -> A.arr

Percentile function of the distribution.

val sf : t -> A.arr -> A.arr

Survival function of the distribution.

val logsf : t -> A.arr -> A.arr

Logarithm of the survival function of the distribution.

val isf : t -> A.arr -> A.arr

Inverse survival function of the distribution.


Module Make.Gamma

type t = {
  1. shape : A.arr;
  2. scale : A.arr;
}

Type definition of a specific distribution

val make : shape:A.arr -> scale:A.arr -> t

Make a distribution of the given parameters.

val sample : t -> int -> A.arr

Sample a distribution of the given parameters.

val pdf : t -> A.arr -> A.arr

Probability density/mass function of the distribution.

val logpdf : t -> A.arr -> A.arr

Logarithm of the probability density/mass function of the distribution.

val cdf : t -> A.arr -> A.arr

Cumulative density/mass function of the distribution.

val logcdf : t -> A.arr -> A.arr

Logarithm of the cumulative density/mass function of the distribution.

val ppf : t -> A.arr -> A.arr

Percentile function of the distribution.

val sf : t -> A.arr -> A.arr

Survival function of the distribution.

val logsf : t -> A.arr -> A.arr

Logarithm of the survival function of the distribution.

val isf : t -> A.arr -> A.arr

Inverse survival function of the distribution.


Module Make.Gaussian

type t = {
  1. mu : A.arr;
  2. sigma : A.arr;
}

Type definition of a specific distribution

val make : mu:A.arr -> sigma:A.arr -> t

Make a distribution of the given parameters.

val sample : t -> int -> A.arr

Sample a distribution of the given parameters.

val pdf : t -> A.arr -> A.arr

Probability density/mass function of the distribution.

val logpdf : t -> A.arr -> A.arr

Logarithm of the probability density/mass function of the distribution.

val cdf : t -> A.arr -> A.arr

Cumulative density/mass function of the distribution.

val logcdf : t -> A.arr -> A.arr

Logarithm of the cumulative density/mass function of the distribution.

val ppf : t -> A.arr -> A.arr

Percentile function of the distribution.

val sf : t -> A.arr -> A.arr

Survival function of the distribution.

val logsf : t -> A.arr -> A.arr

Logarithm of the survival function of the distribution.

val isf : t -> A.arr -> A.arr

Inverse survival function of the distribution.
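A minimal end-to-end sketch for the Gaussian case, under the same assumed functor instantiation used in the Beta example above:

  module M = Owl_distribution.Make (Owl.Dense.Ndarray.D)

  let () =
    let open Owl.Dense.Ndarray.D in
    let g = M.Gaussian.make ~mu:(zeros [| 1 |]) ~sigma:(ones [| 1 |]) in
    let x = create [| 1 |] 1.96 in
    (* the standard-normal CDF at 1.96 is approximately 0.975 *)
    Printf.printf "cdf(1.96) = %g\n" (get (M.Gaussian.cdf g x) [| 0 |])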


Module Make.Gumbel1

type t = {
  1. a : A.arr;
  2. b : A.arr;
}

Type definition of a specific distribution

val make : a:A.arr -> b:A.arr -> t

Make a distribution of the given parameters.

val sample : t -> int -> A.arr

Sample a distribution of the given parameters.

val pdf : t -> A.arr -> A.arr

Probability density/mass function of the distribution.

val logpdf : t -> A.arr -> A.arr

Logarithm of the probability density/mass function of the distribution.

val cdf : t -> A.arr -> A.arr

Cumulative density/mass function of the distribution.

val logcdf : t -> A.arr -> A.arr

Logarithm of the cumulative density/mass function of the distribution.

val ppf : t -> A.arr -> A.arr

Percentile function of the distribution.

val sf : t -> A.arr -> A.arr

Survival function of the distribution.

val logsf : t -> A.arr -> A.arr

Logarithm of the survival function of the distribution.

val isf : t -> A.arr -> A.arr

Inverse survival function of the distribution.


Module Make.Gumbel2

type t = {
  1. a : A.arr;
  2. b : A.arr;
}

Type definition of a specific distribution

val make : a:A.arr -> b:A.arr -> t

Make a distribution of the given parameters.

val sample : t -> int -> A.arr

Sample a distribution of the given parameters.

val pdf : t -> A.arr -> A.arr

Probability density/mass function of the distribution.

val logpdf : t -> A.arr -> A.arr

Logarithm of the probability density/mass function of the distribution.

val cdf : t -> A.arr -> A.arr

Cumulative density/mass function of the distribution.

val logcdf : t -> A.arr -> A.arr

Logarithm of the cumulative density/mass function of the distribution.

val ppf : t -> A.arr -> A.arr

Percentile function of the distribution.

val sf : t -> A.arr -> A.arr

Survival function of the distribution.

val logsf : t -> A.arr -> A.arr

Logarithm of the survival function of the distribution.

val isf : t -> A.arr -> A.arr

Inverse survival function of the distribution.


Module Make.Laplace

type t = {
  1. loc : A.arr;
  2. scale : A.arr;
}

Type definition of a specific distribution

val make : loc:A.arr -> scale:A.arr -> t

Make a distribution of the given parameters.

val sample : t -> int -> A.arr

Sample a distribution of the given parameters.

val pdf : t -> A.arr -> A.arr

Probability density/mass function of the distribution.

val logpdf : t -> A.arr -> A.arr

Logarithm of the probability density/mass function of the distribution.

val cdf : t -> A.arr -> A.arr

Cumulative density/mass function of the distribution.

val logcdf : t -> A.arr -> A.arr

Logarithm of the cumulative density/mass function of the distribution.

val ppf : t -> A.arr -> A.arr

Percentile function of the distribution.

val sf : t -> A.arr -> A.arr

Survival function of the distribution.

val logsf : t -> A.arr -> A.arr

Logarithm of the survival function of the distribution.

val isf : t -> A.arr -> A.arr

Inverse survival function of the distribution.


Module Make.Logistic

type t = {
  1. loc : A.arr;
  2. scale : A.arr;
}

Type definition of a specific distribution

val make : loc:A.arr -> scale:A.arr -> t

Make a distribution of the given parameters.

val sample : t -> int -> A.arr

Sample a distribution of the given parameters.

val pdf : t -> A.arr -> A.arr

Probability density/mass function of the distribution.

val logpdf : t -> A.arr -> A.arr

Logarithm of the probability density/mass function of the distribution.

val cdf : t -> A.arr -> A.arr

Cumulative density/mass function of the distribution.

val logcdf : t -> A.arr -> A.arr

Logarithm of the cumulative density/mass function of the distribution.

val ppf : t -> A.arr -> A.arr

Percentile function of the distribution.

val sf : t -> A.arr -> A.arr

Survival function of the distribution.

val logsf : t -> A.arr -> A.arr

Logarithm of the survival function of the distribution.

val isf : t -> A.arr -> A.arr

Inverse survival function of the distribution.


Module Make.Lognormal

type t = {
  1. mu : A.arr;
  2. sigma : A.arr;
}

Type definition of a specific distribution

val make : mu:A.arr -> sigma:A.arr -> t

Make a distribution of the given parameters.

val sample : t -> int -> A.arr

Sample a distribution of the given parameters.

val pdf : t -> A.arr -> A.arr

Probability density/mass function of the distribution.

val logpdf : t -> A.arr -> A.arr

Logarithm of the probability density/mass function of the distribution.

val cdf : t -> A.arr -> A.arr

Cumulative density/mass function of the distribution.

val logcdf : t -> A.arr -> A.arr

Logarithm of the cumulative density/mass function of the distribution.

val ppf : t -> A.arr -> A.arr

Percentile function of the distribution.

val sf : t -> A.arr -> A.arr

Survival function of the distribution.

val logsf : t -> A.arr -> A.arr

Logarithm of the survival function of the distribution.

val isf : t -> A.arr -> A.arr

Inverse survival function of the distribution.


Module Make.Lomax

type t = {
  1. shape : A.arr;
  2. scale : A.arr;
}

Type definition of a specific distribution

val make : shape:A.arr -> scale:A.arr -> t

Make a distribution of the given parameters.

val sample : t -> int -> A.arr

Sample a distribution of the given parameters.

val pdf : t -> A.arr -> A.arr

Probability density/mass function of the distribution.

val logpdf : t -> A.arr -> A.arr

Logarithm of the probability density/mass function of the distribution.

val cdf : t -> A.arr -> A.arr

Cumulative density/mass function of the distribution.

val logcdf : t -> A.arr -> A.arr

Logarithm of the cumulative density/mass function of the distribution.

val ppf : t -> A.arr -> A.arr

Percentile function of the distribution.

val sf : t -> A.arr -> A.arr

Survival function of the distribution.

val logsf : t -> A.arr -> A.arr

Logarithm of the survival function of the distribution.

val isf : t -> A.arr -> A.arr

Inverse survival function of the distribution.


Module Make.Poisson

type t = {
  1. mu : A.arr;
}

Type definition of a specific distribution

val make : mu:A.arr -> t

Make a distribution of the given parameters.

val sample : t -> int -> A.arr

Sample a distribution of the given parameters.


Module Make.Rayleigh

type t = {
  1. sigma : A.arr;
}

Type definition of a specific distribution

val make : sigma:A.arr -> t

Make a distribution of the given parameters.

val sample : t -> int -> A.arr

Sample a distribution of the given parameters.

val pdf : t -> A.arr -> A.arr

Probability density/mass function of the distribution.

val logpdf : t -> A.arr -> A.arr

Logarithm of the probability density/mass function of the distribution.

val cdf : t -> A.arr -> A.arr

Cumulative density/mass function of the distribution.

val logcdf : t -> A.arr -> A.arr

Logarithm of the cumulative density/mass function of the distribution.

val ppf : t -> A.arr -> A.arr

Percentile function of the distribution.

val sf : t -> A.arr -> A.arr

Survival function of the distribution.

val logsf : t -> A.arr -> A.arr

Logarithm of the survival function of the distribution.

val isf : t -> A.arr -> A.arr

Inverse survival function of the distribution.


Module Make.Uniform

type t = {
  1. a : A.arr;
  2. b : A.arr;
}

Type definition of a specific distribution

val make : a:A.arr -> b:A.arr -> t

Make a distribution of the given parameters.

val sample : t -> int -> A.arr

Sample a distribution of the given parameters.

val pdf : t -> A.arr -> A.arr

Probability density/mass function of the distribution.

val logpdf : t -> A.arr -> A.arr

Logarithm of the probability density/mass function of the distribution.

val cdf : t -> A.arr -> A.arr

Cumulative density/mass function of the distribution.

val logcdf : t -> A.arr -> A.arr

Logarithm of the cumulative density/mass function of the distribution.

val ppf : t -> A.arr -> A.arr

Percentile function of the distribution.

val sf : t -> A.arr -> A.arr

Survival function of the distribution.

val logsf : t -> A.arr -> A.arr

Logarithm of the survival function of the distribution.

val isf : t -> A.arr -> A.arr

Inverse survival function of the distribution.


Module Make.Weibull

type t = {
  1. shape : A.arr;
  2. scale : A.arr;
}

Type definition of a specific distribution

val make : shape:A.arr -> scale:A.arr -> t

Make a distribution of the given parameters.

val sample : t -> int -> A.arr

Sample a distribution of the given parameters.

val pdf : t -> A.arr -> A.arr

Probability density/mass function of the distribution.

val logpdf : t -> A.arr -> A.arr

Logarithm of the probability density/mass function of the distribution.

val cdf : t -> A.arr -> A.arr

Cumulative density/mass function of the distribution.

val logcdf : t -> A.arr -> A.arr

Logarithm of the cumulative density/mass function of the distribution.

val ppf : t -> A.arr -> A.arr

Percentile function of the distribution.

val sf : t -> A.arr -> A.arr

Survival function of the distribution.

val logsf : t -> A.arr -> A.arr

Logarithm of the survival function of the distribution.

val isf : t -> A.arr -> A.arr

Inverse survival function of the distribution.


Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt

Parameter Make.A

include Owl_types_stats_dist.Sig
include Owl_types_ndarray_mutable.Sig
include Owl_types_ndarray_algodiff.Sig
include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
val create_ : out:arr -> elt -> unit
val uniform_ : ?a:elt -> ?b:elt -> out:arr -> unit
val gaussian_ : ?mu:elt -> ?sigma:elt -> out:arr -> unit
val sequential_ : ?a:elt -> ?step:elt -> out:arr -> unit
val bernoulli_ : ?p:elt -> out:arr -> unit
val zeros_ : out:arr -> unit
val ones_ : out:arr -> unit
val one_hot_ : out:arr -> int -> arr -> unit
val get_slice_ : out:arr -> int list list -> arr -> unit
val set_slice_ : out:arr -> int list list -> arr -> arr -> unit
val sub_left : arr -> int -> int -> arr
val reshape_ : out:arr -> arr -> unit
val reverse_ : out:arr -> arr -> unit
val transpose_ : out:arr -> ?axis:int array -> arr -> unit
val repeat_ : out:arr -> arr -> int array -> unit
val tile_ : out:arr -> arr -> int array -> unit
val pad_ : out:arr -> ?v:elt -> int list list -> arr -> unit
val hypot : arr -> arr -> arr
val fmod : arr -> arr -> arr
val min2 : arr -> arr -> arr
val max2 : arr -> arr -> arr
val add_ : ?out:arr -> arr -> arr -> unit
val sub_ : ?out:arr -> arr -> arr -> unit
val mul_ : ?out:arr -> arr -> arr -> unit
val div_ : ?out:arr -> arr -> arr -> unit
val pow_ : ?out:arr -> arr -> arr -> unit
val atan2_ : ?out:arr -> arr -> arr -> unit
val hypot_ : ?out:arr -> arr -> arr -> unit
val fmod_ : ?out:arr -> arr -> arr -> unit
val min2_ : ?out:arr -> arr -> arr -> unit
val max2_ : ?out:arr -> arr -> arr -> unit
val add_scalar_ : ?out:arr -> arr -> elt -> unit
val sub_scalar_ : ?out:arr -> arr -> elt -> unit
val mul_scalar_ : ?out:arr -> arr -> elt -> unit
val div_scalar_ : ?out:arr -> arr -> elt -> unit
val pow_scalar_ : ?out:arr -> arr -> elt -> unit
val atan2_scalar_ : ?out:arr -> arr -> elt -> unit
val fmod_scalar_ : ?out:arr -> arr -> elt -> unit
val scalar_add_ : ?out:arr -> elt -> arr -> unit
val scalar_sub_ : ?out:arr -> elt -> arr -> unit
val scalar_mul_ : ?out:arr -> elt -> arr -> unit
val scalar_div_ : ?out:arr -> elt -> arr -> unit
val scalar_pow_ : ?out:arr -> elt -> arr -> unit
val scalar_atan2_ : ?out:arr -> elt -> arr -> unit
val scalar_fmod_ : ?out:arr -> elt -> arr -> unit
val fma_ : ?out:arr -> arr -> arr -> arr -> unit
val clip_by_value_ : ?out:arr -> ?amin:elt -> ?amax:elt -> arr -> unit
val clip_by_l2norm_ : ?out:arr -> elt -> arr -> unit
val dot_ : ?transa:bool -> ?transb:bool -> ?alpha:elt -> ?beta:elt -> c:arr -> arr -> arr -> unit
val abs_ : ?out:arr -> arr -> unit
val neg_ : ?out:arr -> arr -> unit
val conj_ : ?out:arr -> arr -> unit
val reci_ : ?out:arr -> arr -> unit
val signum_ : ?out:arr -> arr -> unit
val sqr_ : ?out:arr -> arr -> unit
val sqrt_ : ?out:arr -> arr -> unit
val cbrt_ : ?out:arr -> arr -> unit
val exp_ : ?out:arr -> arr -> unit
val exp2_ : ?out:arr -> arr -> unit
val exp10_ : ?out:arr -> arr -> unit
val expm1_ : ?out:arr -> arr -> unit
val log_ : ?out:arr -> arr -> unit
val log2_ : ?out:arr -> arr -> unit
val log10_ : ?out:arr -> arr -> unit
val log1p_ : ?out:arr -> arr -> unit
val sin_ : ?out:arr -> arr -> unit
val cos_ : ?out:arr -> arr -> unit
val tan_ : ?out:arr -> arr -> unit
val asin_ : ?out:arr -> arr -> unit
val acos_ : ?out:arr -> arr -> unit
val atan_ : ?out:arr -> arr -> unit
val sinh_ : ?out:arr -> arr -> unit
val cosh_ : ?out:arr -> arr -> unit
val tanh_ : ?out:arr -> arr -> unit
val asinh_ : ?out:arr -> arr -> unit
val acosh_ : ?out:arr -> arr -> unit
val atanh_ : ?out:arr -> arr -> unit
val floor_ : ?out:arr -> arr -> unit
val ceil_ : ?out:arr -> arr -> unit
val round_ : ?out:arr -> arr -> unit
val trunc_ : ?out:arr -> arr -> unit
val fix_ : ?out:arr -> arr -> unit
val erf_ : ?out:arr -> arr -> unit
val erfc_ : ?out:arr -> arr -> unit
val relu_ : ?out:arr -> arr -> unit
val softplus_ : ?out:arr -> arr -> unit
val softsign_ : ?out:arr -> arr -> unit
val softmax_ : ?out:arr -> ?axis:int -> arr -> unit
val sigmoid_ : ?out:arr -> arr -> unit
val sum_ : out:arr -> axis:int -> arr -> unit
val min_ : out:arr -> axis:int -> arr -> unit
val max_ : out:arr -> axis:int -> arr -> unit
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val prod : ?axis:int -> ?keep_dims:bool -> arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val mean : ?axis:int -> ?keep_dims:bool -> arr -> arr
val var : ?axis:int -> ?keep_dims:bool -> arr -> arr
val std : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l2norm : ?axis:int -> ?keep_dims:bool -> arr -> arr
val cumsum_ : ?out:arr -> ?axis:int -> arr -> unit
val cumprod_ : ?out:arr -> ?axis:int -> arr -> unit
val cummin_ : ?out:arr -> ?axis:int -> arr -> unit
val cummax_ : ?out:arr -> ?axis:int -> arr -> unit
val dropout_ : ?out:arr -> ?rate:float -> arr -> unit
val prod' : arr -> elt
val mean' : arr -> elt
val var' : arr -> elt
val std' : arr -> elt
val elt_equal_ : ?out:arr -> arr -> arr -> unit
val elt_not_equal_ : ?out:arr -> arr -> arr -> unit
val elt_less_ : ?out:arr -> arr -> arr -> unit
val elt_greater_ : ?out:arr -> arr -> arr -> unit
val elt_less_equal_ : ?out:arr -> arr -> arr -> unit
val elt_greater_equal_ : ?out:arr -> arr -> arr -> unit
val elt_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_not_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_less_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val elt_greater_equal_scalar_ : ?out:arr -> arr -> elt -> unit
val conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val dilated_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val dilated_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> unit
val transpose_conv1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val transpose_conv3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> arr -> int array -> unit
val max_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val max_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool1d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool2d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val avg_pool3d_ : out:arr -> ?padding:Owl_types_common.padding -> arr -> int array -> int array -> unit
val upsampling2d_ : out:arr -> arr -> int array -> unit
val conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val dilated_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val dilated_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> int array -> arr -> unit
val transpose_conv1d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv1d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv2d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_input_ : out:arr -> arr -> arr -> int array -> arr -> unit
val transpose_conv3d_backward_kernel_ : out:arr -> arr -> arr -> int array -> arr -> unit
val max_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val max_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool1d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool2d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val avg_pool3d_backward_ : out:arr -> Owl_types_common.padding -> arr -> int array -> int array -> arr -> unit
val upsampling2d_backward_ : out:arr -> arr -> int array -> arr -> unit
val fused_adagrad_ : ?out:arr -> rate:float -> eps:float -> arr -> unit
val uniform_rvs : a:arr -> b:arr -> n:int -> arr
val uniform_pdf : a:arr -> b:arr -> arr -> arr
val uniform_logpdf : a:arr -> b:arr -> arr -> arr
val uniform_cdf : a:arr -> b:arr -> arr -> arr
val uniform_logcdf : a:arr -> b:arr -> arr -> arr
val uniform_ppf : a:arr -> b:arr -> arr -> arr
val uniform_sf : a:arr -> b:arr -> arr -> arr
val uniform_logsf : a:arr -> b:arr -> arr -> arr
val uniform_isf : a:arr -> b:arr -> arr -> arr
val gaussian_rvs : mu:arr -> sigma:arr -> n:int -> arr
val gaussian_pdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logpdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_cdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logcdf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_ppf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_sf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_logsf : mu:arr -> sigma:arr -> arr -> arr
val gaussian_isf : mu:arr -> sigma:arr -> arr -> arr
val exponential_rvs : lambda:arr -> n:int -> arr
val exponential_pdf : lambda:arr -> arr -> arr
val exponential_logpdf : lambda:arr -> arr -> arr
val exponential_cdf : lambda:arr -> arr -> arr
val exponential_logcdf : lambda:arr -> arr -> arr
val exponential_ppf : lambda:arr -> arr -> arr
val exponential_sf : lambda:arr -> arr -> arr
val exponential_logsf : lambda:arr -> arr -> arr
val exponential_isf : lambda:arr -> arr -> arr
val poisson_rvs : mu:arr -> n:int -> arr
val gamma_rvs : shape:arr -> scale:arr -> n:int -> arr
val gamma_pdf : shape:arr -> scale:arr -> arr -> arr
val gamma_logpdf : shape:arr -> scale:arr -> arr -> arr
val gamma_cdf : shape:arr -> scale:arr -> arr -> arr
val gamma_logcdf : shape:arr -> scale:arr -> arr -> arr
val gamma_ppf : shape:arr -> scale:arr -> arr -> arr
val gamma_sf : shape:arr -> scale:arr -> arr -> arr
val gamma_logsf : shape:arr -> scale:arr -> arr -> arr
val gamma_isf : shape:arr -> scale:arr -> arr -> arr
val beta_rvs : a:arr -> b:arr -> n:int -> arr
val beta_pdf : a:arr -> b:arr -> arr -> arr
val beta_logpdf : a:arr -> b:arr -> arr -> arr
val beta_cdf : a:arr -> b:arr -> arr -> arr
val beta_logcdf : a:arr -> b:arr -> arr -> arr
val beta_ppf : a:arr -> b:arr -> arr -> arr
val beta_sf : a:arr -> b:arr -> arr -> arr
val beta_logsf : a:arr -> b:arr -> arr -> arr
val beta_isf : a:arr -> b:arr -> arr -> arr
val chi2_rvs : df:arr -> n:int -> arr
val chi2_pdf : df:arr -> arr -> arr
val chi2_logpdf : df:arr -> arr -> arr
val chi2_cdf : df:arr -> arr -> arr
val chi2_logcdf : df:arr -> arr -> arr
val chi2_ppf : df:arr -> arr -> arr
val chi2_sf : df:arr -> arr -> arr
val chi2_logsf : df:arr -> arr -> arr
val chi2_isf : df:arr -> arr -> arr
val f_rvs : dfnum:arr -> dfden:arr -> n:int -> arr
val f_pdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logpdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_cdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logcdf : dfnum:arr -> dfden:arr -> arr -> arr
val f_ppf : dfnum:arr -> dfden:arr -> arr -> arr
val f_sf : dfnum:arr -> dfden:arr -> arr -> arr
val f_logsf : dfnum:arr -> dfden:arr -> arr -> arr
val f_isf : dfnum:arr -> dfden:arr -> arr -> arr
val cauchy_rvs : loc:arr -> scale:arr -> n:int -> arr
val cauchy_pdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logpdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_cdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logcdf : loc:arr -> scale:arr -> arr -> arr
val cauchy_ppf : loc:arr -> scale:arr -> arr -> arr
val cauchy_sf : loc:arr -> scale:arr -> arr -> arr
val cauchy_logsf : loc:arr -> scale:arr -> arr -> arr
val cauchy_isf : loc:arr -> scale:arr -> arr -> arr
val lomax_rvs : shape:arr -> scale:arr -> n:int -> arr
val lomax_pdf : shape:arr -> scale:arr -> arr -> arr
val lomax_logpdf : shape:arr -> scale:arr -> arr -> arr
val lomax_cdf : shape:arr -> scale:arr -> arr -> arr
val lomax_logcdf : shape:arr -> scale:arr -> arr -> arr
val lomax_ppf : shape:arr -> scale:arr -> arr -> arr
val lomax_sf : shape:arr -> scale:arr -> arr -> arr
val lomax_logsf : shape:arr -> scale:arr -> arr -> arr
val lomax_isf : shape:arr -> scale:arr -> arr -> arr
val weibull_rvs : shape:arr -> scale:arr -> n:int -> arr
val weibull_pdf : shape:arr -> scale:arr -> arr -> arr
val weibull_logpdf : shape:arr -> scale:arr -> arr -> arr
val weibull_cdf : shape:arr -> scale:arr -> arr -> arr
val weibull_logcdf : shape:arr -> scale:arr -> arr -> arr
val weibull_ppf : shape:arr -> scale:arr -> arr -> arr
val weibull_sf : shape:arr -> scale:arr -> arr -> arr
val weibull_logsf : shape:arr -> scale:arr -> arr -> arr
val weibull_isf : shape:arr -> scale:arr -> arr -> arr
val laplace_rvs : loc:arr -> scale:arr -> n:int -> arr
val laplace_pdf : loc:arr -> scale:arr -> arr -> arr
val laplace_logpdf : loc:arr -> scale:arr -> arr -> arr
val laplace_cdf : loc:arr -> scale:arr -> arr -> arr
val laplace_logcdf : loc:arr -> scale:arr -> arr -> arr
val laplace_ppf : loc:arr -> scale:arr -> arr -> arr
val laplace_sf : loc:arr -> scale:arr -> arr -> arr
val laplace_logsf : loc:arr -> scale:arr -> arr -> arr
val laplace_isf : loc:arr -> scale:arr -> arr -> arr
val gumbel1_rvs : a:arr -> b:arr -> n:int -> arr
val gumbel1_pdf : a:arr -> b:arr -> arr -> arr
val gumbel1_logpdf : a:arr -> b:arr -> arr -> arr
val gumbel1_cdf : a:arr -> b:arr -> arr -> arr
val gumbel1_logcdf : a:arr -> b:arr -> arr -> arr
val gumbel1_ppf : a:arr -> b:arr -> arr -> arr
val gumbel1_sf : a:arr -> b:arr -> arr -> arr
val gumbel1_logsf : a:arr -> b:arr -> arr -> arr
val gumbel1_isf : a:arr -> b:arr -> arr -> arr
val gumbel2_rvs : a:arr -> b:arr -> n:int -> arr
val gumbel2_pdf : a:arr -> b:arr -> arr -> arr
val gumbel2_logpdf : a:arr -> b:arr -> arr -> arr
val gumbel2_cdf : a:arr -> b:arr -> arr -> arr
val gumbel2_logcdf : a:arr -> b:arr -> arr -> arr
val gumbel2_ppf : a:arr -> b:arr -> arr -> arr
val gumbel2_sf : a:arr -> b:arr -> arr -> arr
val gumbel2_logsf : a:arr -> b:arr -> arr -> arr
val gumbel2_isf : a:arr -> b:arr -> arr -> arr
val logistic_rvs : loc:arr -> scale:arr -> n:int -> arr
val logistic_pdf : loc:arr -> scale:arr -> arr -> arr
val logistic_logpdf : loc:arr -> scale:arr -> arr -> arr
val logistic_cdf : loc:arr -> scale:arr -> arr -> arr
val logistic_logcdf : loc:arr -> scale:arr -> arr -> arr
val logistic_ppf : loc:arr -> scale:arr -> arr -> arr
val logistic_sf : loc:arr -> scale:arr -> arr -> arr
val logistic_logsf : loc:arr -> scale:arr -> arr -> arr
val logistic_isf : loc:arr -> scale:arr -> arr -> arr
val lognormal_rvs : mu:arr -> sigma:arr -> n:int -> arr
val lognormal_pdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logpdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_cdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logcdf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_ppf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_sf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_logsf : mu:arr -> sigma:arr -> arr -> arr
val lognormal_isf : mu:arr -> sigma:arr -> arr -> arr
val rayleigh_rvs : sigma:arr -> n:int -> arr
val rayleigh_pdf : sigma:arr -> arr -> arr
val rayleigh_logpdf : sigma:arr -> arr -> arr
val rayleigh_cdf : sigma:arr -> arr -> arr
val rayleigh_logcdf : sigma:arr -> arr -> arr
val rayleigh_ppf : sigma:arr -> arr -> arr
val rayleigh_sf : sigma:arr -> arr -> arr
val rayleigh_logsf : sigma:arr -> arr -> arr
val rayleigh_isf : sigma:arr -> arr -> arr
\ No newline at end of file
diff --git a/owl/Owl_distribution/Make/index.html b/owl/Owl_distribution/Make/index.html
deleted file mode 100644
index 572391259..000000000
--- a/owl/Owl_distribution/Make/index.html
+++ /dev/null
@@ -1,2 +0,0 @@
Make (owl.Owl_distribution.Make)

Module Owl_distribution.Make

Parameters

Signature

Uniform distribution
module Uniform : sig ... end
Gaussian distribution
module Gaussian : sig ... end
Exponential distribution
module Exponential : sig ... end
Poisson distribution
module Poisson : sig ... end
Gamma distribution
module Gamma : sig ... end
Beta distribution
module Beta : sig ... end
Chi2 distribution
module Chi2 : sig ... end
F distribution
module F : sig ... end
Cauchy distribution
module Cauchy : sig ... end
Lomax distribution
module Lomax : sig ... end
Weibull distribution
module Weibull : sig ... end
Laplace distribution
module Laplace : sig ... end
Gumbel1 distribution
module Gumbel1 : sig ... end
Gumbel2 distribution
module Gumbel2 : sig ... end
Logistic distribution
module Logistic : sig ... end
Lognormal distribution
module Lognormal : sig ... end
Rayleigh distribution
module Rayleigh : sig ... end
Type definition
type dist =
  | Uniform of Uniform.t
  | Gaussian of Gaussian.t
  | Exponential of Exponential.t
  | Gamma of Gamma.t
  | Beta of Beta.t
  | Chi2 of Chi2.t
  | F of F.t
  | Cauchy of Cauchy.t
  | Lomax of Lomax.t
  | Weibull of Weibull.t
  | Laplace of Laplace.t
  | Gumbel1 of Gumbel1.t
  | Gumbel2 of Gumbel2.t
  | Logistic of Logistic.t
  | Lognormal of Lognormal.t
  | Rayleigh of Rayleigh.t

Type definition of various distributions.
Core functions
val sample : dist -> int -> A.arr

Sample from the given distribution with the given parameters.

val prob : dist -> A.arr -> A.arr

Probability density/mass function of a given distribution.

val log_prob : dist -> A.arr -> A.arr

Logarithmic probability density/mass function of a given distribution.

val cdf : dist -> A.arr -> A.arr

Cumulative density/mass function of the distribution.

val logcdf : dist -> A.arr -> A.arr

Logarithm of the cumulative density/mass function of the distribution.

\ No newline at end of file
diff --git a/owl/Owl_distribution_common/.dummy b/owl/Owl_distribution_common/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_distribution_generic/.dummy b/owl/Owl_distribution_generic/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_fft/.dummy b/owl/Owl_fft/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_fft/D/index.html b/owl/Owl_fft/D/index.html
deleted file mode 100644
index a91527c4c..000000000
--- a/owl/Owl_fft/D/index.html
+++ /dev/null
@@ -1,19 +0,0 @@
D (owl.Owl_fft.D)

Module Owl_fft.D

include module type of struct include Owl_fft_d end
val fft : ?axis:int -> (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) Owl_dense_ndarray_generic.t -> (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) Owl_dense_ndarray_generic.t
val ifft : ?axis:int -> (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) Owl_dense_ndarray_generic.t -> (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) Owl_dense_ndarray_generic.t
val rfft : ?axis:int -> (float, Stdlib.Bigarray.float64_elt) Owl_dense_ndarray_generic.t -> (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) Owl_dense_ndarray_generic.t
val irfft : ?axis:int -> ?n:int -> (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) Owl_dense_ndarray_generic.t -> (float, Stdlib.Bigarray.float64_elt) Owl_dense_ndarray_generic.t
val fft2 : (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) Owl_dense_ndarray_generic.t -> (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) Owl_dense_ndarray_generic.t
val ifft2 : (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) Owl_dense_ndarray_generic.t -> (Stdlib.Complex.t, Stdlib.Bigarray.complex64_elt) Owl_dense_ndarray_generic.t
\ No newline at end of file
diff --git a/owl/Owl_fft/Generic/index.html b/owl/Owl_fft/Generic/index.html
deleted file mode 100644
index f00466ab8..000000000
--- a/owl/Owl_fft/Generic/index.html
+++ /dev/null
@@ -1,21 +0,0 @@
Generic (owl.Owl_fft.Generic)

Module Owl_fft.Generic

include module type of struct include Owl_fft_generic end
Basic functions
val fft : ?axis:int -> (Stdlib.Complex.t, 'a) Owl_dense_ndarray_generic.t -> (Stdlib.Complex.t, 'a) Owl_dense_ndarray_generic.t

fft ~axis x performs 1-dimensional FFT on a complex input. axis is the highest dimension if not specified. The return is not scaled.
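
For illustration, here is a minimal sketch (not part of the original documentation) that round-trips a small complex signal through the double-precision submodule; it assumes ifft applies the usual 1/n scaling so that ifft (fft x) recovers x.

.. code-block:: ocaml

  (* Sketch: forward and inverse FFT of an 8-point complex ramp.
     Assumes ifft applies 1/n scaling so the round trip recovers x. *)
  let () =
    let open Owl in
    let x =
      Dense.Ndarray.Z.init [| 8 |]
        (fun i -> Complex.{ re = float_of_int i; im = 0. })
    in
    let y = Fft.D.fft x in
    let x' = Fft.D.ifft y in
    Dense.Ndarray.Z.print y;
    Dense.Ndarray.Z.print x'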

val ifft : ?axis:int -> (Stdlib.Complex.t, 'a) Owl_dense_ndarray_generic.t -> (Stdlib.Complex.t, 'a) Owl_dense_ndarray_generic.t

ifft ~axis x performs inverse 1-dimensional FFT on a complex input. axis is the highest dimension by default.

val rfft : ?axis:int -> otyp:(Stdlib.Complex.t, 'a) Owl_dense_ndarray_generic.kind -> (float, 'b) Owl_dense_ndarray_generic.t -> (Stdlib.Complex.t, 'a) Owl_dense_ndarray_generic.t

rfft ~axis ~otyp x performs 1-dimensional FFT on real input along the axis. otyp specifies the output type; its precision must be consistent with that of the input x. You can skip this parameter by using a submodule with a specific precision, such as Owl.Fft.S or Owl.Fft.D.
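
A minimal sketch (not from the original text) using the D submodule, which fixes otyp so it can be omitted; the claim that the spectrum has length n/2 + 1 follows the usual real-FFT convention and is an assumption here.

.. code-block:: ocaml

  (* Sketch: real-to-complex FFT and its inverse via Owl.Fft.D. *)
  let () =
    let open Owl in
    let x = Arr.sequential [| 8 |] in   (* real float64 input 0..7 *)
    let y = Fft.D.rfft x in             (* complex spectrum, typically length 8/2 + 1 *)
    let x' = Fft.D.irfft ~n:8 y in      (* back to a length-8 real signal *)
    Arr.print x'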

val irfft : ?axis:int -> ?n:int -> otyp:(float, 'a) Owl_dense_ndarray_generic.kind -> (Stdlib.Complex.t, 'b) Owl_dense_ndarray_generic.t -> (float, 'a) Owl_dense_ndarray_generic.t

irfft ~axis ~n x is the inverse function of rfft. Note that the n parameter specifies the size of the output.

val fft2 : (Stdlib.Complex.t, 'a) Owl_dense_ndarray_generic.t -> (Stdlib.Complex.t, 'a) Owl_dense_ndarray_generic.t

fft2 x performs 2-dimensional FFT on a complex input. The return is not scaled.

val ifft2 : (Stdlib.Complex.t, 'a) Owl_dense_ndarray_generic.t -> (Stdlib.Complex.t, 'a) Owl_dense_ndarray_generic.t

ifft2 x performs inverse 2-dimensional FFT on a complex input.

\ No newline at end of file
diff --git a/owl/Owl_fft/S/index.html b/owl/Owl_fft/S/index.html
deleted file mode 100644
index c43a71d4c..000000000
--- a/owl/Owl_fft/S/index.html
+++ /dev/null
@@ -1,19 +0,0 @@
S (owl.Owl_fft.S)

Module Owl_fft.S

include module type of struct include Owl_fft_s end
val fft : ?axis:int -> (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) Owl_dense_ndarray_generic.t -> (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) Owl_dense_ndarray_generic.t
val ifft : ?axis:int -> (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) Owl_dense_ndarray_generic.t -> (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) Owl_dense_ndarray_generic.t
val rfft : ?axis:int -> (float, Stdlib.Bigarray.float32_elt) Owl_dense_ndarray_generic.t -> (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) Owl_dense_ndarray_generic.t
val irfft : ?axis:int -> ?n:int -> (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) Owl_dense_ndarray_generic.t -> (float, Stdlib.Bigarray.float32_elt) Owl_dense_ndarray_generic.t
val fft2 : (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) Owl_dense_ndarray_generic.t -> (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) Owl_dense_ndarray_generic.t
val ifft2 : (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) Owl_dense_ndarray_generic.t -> (Stdlib.Complex.t, Stdlib.Bigarray.complex32_elt) Owl_dense_ndarray_generic.t
\ No newline at end of file
diff --git a/owl/Owl_fft_d/.dummy b/owl/Owl_fft_d/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_fft_generic/.dummy b/owl/Owl_fft_generic/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_fft_s/.dummy b/owl/Owl_fft_s/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_fftpack/.dummy b/owl/Owl_fftpack/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_lapacke/.dummy b/owl/Owl_lapacke/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_lapacke_generated/.dummy b/owl/Owl_lapacke_generated/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_linalg/.dummy b/owl/Owl_linalg/.dummy
deleted file mode 100644
index e69de29bb..000000000
diff --git a/owl/Owl_linalg/C/index.html b/owl/Owl_linalg/C/index.html
deleted file mode 100644
index a5e67b4e8..000000000
--- a/owl/Owl_linalg/C/index.html
+++ /dev/null
@@ -1,25 +0,0 @@
C (owl.Owl_linalg.C)

Module Owl_linalg.C

include module type of struct include Owl_linalg_c end
type elt = Stdlib.Complex.t
type int32_mat = (int32, Stdlib.Bigarray.int32_elt) Owl_dense_matrix_generic.t
include Owl_linalg_intf.Common with type elt := elt and type mat := mat and type complex_mat = mat and type int32_mat := int32_mat
include Owl_base_linalg_intf.Common with type elt := elt with type mat := mat with type complex_mat = mat with type int32_mat := int32_mat
type complex_mat = mat
Basic functions
val inv : mat -> mat
val det : mat -> elt
val logdet : mat -> elt
val is_triu : mat -> bool
val is_tril : mat -> bool
val is_symmetric : mat -> bool
val is_diag : mat -> bool
Factorisation
val svd : ?thin:bool -> mat -> mat * mat * mat
val chol : ?upper:bool -> mat -> mat
val qr : ?thin:bool -> ?pivot:bool -> mat -> mat * mat * int32_mat
val lq : ?thin:bool -> mat -> mat * mat
Linear system of equations
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> mat -> mat -> mat
val sylvester : mat -> mat -> mat -> mat
val lyapunov : mat -> mat -> mat
val discrete_lyapunov : ?solver:[ `default | `direct | `bilinear ] -> mat -> mat -> mat
Basic functions
val pinv : ?tol:float -> mat -> mat
val rank : ?tol:float -> mat -> int
val norm : ?p:float -> mat -> float
val vecnorm : ?p:float -> mat -> float
val cond : ?p:float -> mat -> float
val rcond : mat -> float
val is_posdef : mat -> bool
Factorisation
val lu : mat -> mat * mat * int32_mat
val svdvals : mat -> mat
val gsvd : mat -> mat -> mat * mat * mat * mat * mat * mat
val gsvdvals : mat -> mat -> mat
val schur : mat -> mat * mat * complex_mat
val schur_tz : mat -> mat * mat
val ordschur : select:int32_mat -> mat -> mat -> mat * mat * complex_mat
val qz : mat -> mat -> mat * mat * mat * mat * complex_mat
val ordqz : select:int32_mat -> mat -> mat -> mat -> mat -> mat * mat * mat * mat * complex_mat
val qzvals : mat -> mat -> complex_mat
val hess : mat -> mat * mat
Eigenvalues & eigenvectors
val eig : ?permute:bool -> ?scale:bool -> mat -> complex_mat * complex_mat
val eigvals : ?permute:bool -> ?scale:bool -> mat -> complex_mat
Linear system of equations
val null : mat -> mat
val triangular_solve : upper:bool -> ?trans:bool -> mat -> mat -> mat
val linreg : mat -> mat -> elt * elt
Low-level factorisation functions
val lufact : mat -> mat * int32_mat
val qrfact : ?pivot:bool -> mat -> mat * mat * int32_mat
val bkfact : ?upper:bool -> ?symmetric:bool -> ?rook:bool -> mat -> mat * int32_mat
Matrix functions
val mpow : mat -> float -> mat
val expm : mat -> mat
val sinm : mat -> mat
val cosm : mat -> mat
val tanm : mat -> mat
val sincosm : mat -> mat * mat
val sinhm : mat -> mat
val coshm : mat -> mat
val tanhm : mat -> mat
val sinhcoshm : mat -> mat * mat
Helper functions
val select_ev : [ `LHP | `RHP | `UDI | `UDO ] -> mat -> int32_mat
val peakflops : ?n:int -> unit -> float
\ No newline at end of file
diff --git a/owl/Owl_linalg/D/index.html b/owl/Owl_linalg/D/index.html
deleted file mode 100644
index fca25e436..000000000
--- a/owl/Owl_linalg/D/index.html
+++ /dev/null
@@ -1,25 +0,0 @@
D (owl.Owl_linalg.D)

Module Owl_linalg.D

include module type of struct include Owl_linalg_d end
type elt = float
type complex_mat = Owl_dense_matrix_z.mat
type int32_mat = (int32, Stdlib.Bigarray.int32_elt) Owl_dense_matrix_generic.t
include Owl_linalg_intf.Common with type elt := elt and type mat := mat and type complex_mat := complex_mat and type int32_mat := int32_mat
include Owl_base_linalg_intf.Common with type elt := elt with type mat := mat with type complex_mat := complex_mat with type int32_mat := int32_mat
Basic functions
val inv : mat -> mat
val det : mat -> elt
val logdet : mat -> elt
val is_triu : mat -> bool
val is_tril : mat -> bool
val is_symmetric : mat -> bool
val is_diag : mat -> bool
Factorisation
val svd : ?thin:bool -> mat -> mat * mat * mat
val chol : ?upper:bool -> mat -> mat
val qr : ?thin:bool -> ?pivot:bool -> mat -> mat * mat * int32_mat
val lq : ?thin:bool -> mat -> mat * mat
Linear system of equations
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> mat -> mat -> mat
val sylvester : mat -> mat -> mat -> mat
val lyapunov : mat -> mat -> mat
val discrete_lyapunov : ?solver:[ `default | `direct | `bilinear ] -> mat -> mat -> mat
Basic functions
val pinv : ?tol:float -> mat -> mat
val rank : ?tol:float -> mat -> int
val norm : ?p:float -> mat -> float
val vecnorm : ?p:float -> mat -> float
val cond : ?p:float -> mat -> float
val rcond : mat -> float
val is_posdef : mat -> bool
Factorisation
val lu : mat -> mat * mat * int32_mat
val svdvals : mat -> mat
val gsvd : mat -> mat -> mat * mat * mat * mat * mat * mat
val gsvdvals : mat -> mat -> mat
val schur : mat -> mat * mat * complex_mat
val schur_tz : mat -> mat * mat
val ordschur : select:int32_mat -> mat -> mat -> mat * mat * complex_mat
val qz : mat -> mat -> mat * mat * mat * mat * complex_mat
val ordqz : select:int32_mat -> mat -> mat -> mat -> mat -> mat * mat * mat * mat * complex_mat
val qzvals : mat -> mat -> complex_mat
val hess : mat -> mat * mat
Eigenvalues & eigenvectors
val eig : ?permute:bool -> ?scale:bool -> mat -> complex_mat * complex_mat
val eigvals : ?permute:bool -> ?scale:bool -> mat -> complex_mat
Linear system of equations
val null : mat -> mat
val triangular_solve : upper:bool -> ?trans:bool -> mat -> mat -> mat
val linreg : mat -> mat -> elt * elt
Low-level factorisation functions
val lufact : mat -> mat * int32_mat
val qrfact : ?pivot:bool -> mat -> mat * mat * int32_mat
val bkfact : ?upper:bool -> ?symmetric:bool -> ?rook:bool -> mat -> mat * int32_mat
Matrix functions
val mpow : mat -> float -> mat
val expm : mat -> mat
val sinm : mat -> mat
val cosm : mat -> mat
val tanm : mat -> mat
val sincosm : mat -> mat * mat
val sinhm : mat -> mat
val coshm : mat -> mat
val tanhm : mat -> mat
val sinhcoshm : mat -> mat * mat
Helper functions
val select_ev : [ `LHP | `RHP | `UDI | `UDO ] -> mat -> int32_mat
val peakflops : ?n:int -> unit -> float
include Owl_linalg_intf.Real with type mat := mat and type elt := elt
include Owl_base_linalg_intf.Real with type mat := mat with type elt := elt
val care : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
val dare : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
\ No newline at end of file
diff --git a/owl/Owl_linalg/Generic/index.html b/owl/Owl_linalg/Generic/index.html
deleted file mode 100644
index bf294f787..000000000
--- a/owl/Owl_linalg/Generic/index.html
+++ /dev/null
@@ -1,81 +0,0 @@
Generic (owl.Owl_linalg.Generic)

Module Owl_linalg.Generic

include module type of struct include Owl_linalg_generic end

The module includes a set of advanced linear algebra operations, such as singular value decomposition.

Currently, the Linalg module supports dense matrices of four number types: float32, float64, complex32, and complex64. Support for sparse matrices will be provided in the future.

Type definition
type ('a, 'b) t = ('a, 'b) Owl_dense_matrix_generic.t

Matrix type, a special case of N-dimensional array.

Basic functions
val inv : ('a, 'b) t -> ('a, 'b) t

inv x calculates the inverse x' of an invertible square matrix x such that x' *@ x = I wherein I is the identity matrix. (If x is singular, inv will return a useless result.)
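
As a quick sketch (not part of the original docs), the defining property can be checked numerically with the =~ approximate-equality operator used elsewhere on this page:

.. code-block:: ocaml

  (* Sketch: x *@ inv x should be close to the identity matrix. *)
  let () =
    let open Owl in
    let x = Mat.uniform 4 4 in
    let y = Linalg.D.inv x in
    assert Mat.(x *@ y =~ eye 4)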

val pinv : ?tol:float -> ('a, 'b) t -> ('a, 'b) t

pinv x computes the Moore-Penrose pseudoinverse of matrix x. tol specifies the tolerance: elements whose absolute value is smaller than tol are set to zero.

val det : ('a, 'b) t -> 'a

det x computes the determinant of a square matrix x.

val logdet : ('a, 'b) t -> 'a

logdet x computes the log of the determinant of a square matrix x. It is equivalent to log (det x) but may provide more accuracy and efficiency.

val rank : ?tol:float -> ('a, 'b) t -> int

rank x calculates the rank of a rectangular matrix x of shape m x n. The function does so by counting the number of singular values of x which are beyond a pre-defined threshold tol. By default, tol = max(m,n) * eps where eps = 1e-10.

val norm : ?p:float -> ('a, 'b) t -> float

norm ~p x computes the matrix p-norm of the passed in matrix x.

Parameters: * p is the order of norm, the default value is 2. * x is the input matrix.

Returns:

  • If p = 1, then returns the maximum absolute column sum of the matrix.
  • If p = 2, then returns approximately max (svd x).
  • If p = infinity, then returns the maximum absolute row sum of the matrix.
  • If p = -1, then returns the minimum absolute column sum of the matrix.
  • If p = -2, then returns approximately min (svd x).
  • If p = -infinity, then returns the minimum absolute row sum of the matrix.
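
A short sketch (not from the original documentation) comparing a few of these norms on the same matrix:

.. code-block:: ocaml

  (* Sketch: 1-, 2- and infinity-norms of one random matrix. *)
  let () =
    let open Owl in
    let x = Mat.uniform 3 3 in
    Printf.printf "1-norm=%g 2-norm=%g inf-norm=%g\n"
      (Linalg.D.norm ~p:1. x)
      (Linalg.D.norm ~p:2. x)
      (Linalg.D.norm ~p:infinity x)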

val vecnorm : ?p:float -> ('a, 'b) t -> float

vecnorm ~p x calculates the generalised vector p-norm, defined below. If x is a matrix, it is flattened to a vector first. Unlike the function of the same name in :doc:`owl_dense_ndarray_generic`, this function assumes the input is either a 1d vector or a 2d matrix.

.. math:: ||v||_p = \Big( \sum_{k=0}^{N-1} |v_k|^p \Big)^{1/p}

Parameters: * p is the order of norm, the default value is 2. * x is the input vector or matrix.

Returns: * If p = infinity, then returns :math:`||v||_{\infty} = \max_i(|v(i)|)`. * If p = -infinity, then returns :math:`||v||_{-\infty} = \min_i(|v(i)|)`. * If p = 2 and x is a matrix, then returns the Frobenius norm of x. * Otherwise returns the generalised vector p-norm defined above.

val cond : ?p:float -> ('a, 'b) t -> float

cond ~p x computes the p-norm condition number of matrix x.

cond ~p:1. x returns the 1-norm condition number;

cond ~p:2. x or cond x returns the 2-norm condition number.

cond ~p:infinity x returns the infinity norm condition number.

The default value of p is 2.

val rcond : ('a, 'b) t -> float

rcond x returns an estimate for the reciprocal condition of x in 1-norm. If x is well conditioned, the returned result is near 1.0. If x is badly conditioned, the result is near 0.

Check matrix types
val is_square : ('a, 'b) t -> bool

is_square x returns true if x is a square matrix otherwise false.

val is_triu : ('a, 'b) t -> bool

is_triu x returns true if x is upper triangular otherwise false.

val is_tril : ('a, 'b) t -> bool

is_tril x returns true if x is lower triangular otherwise false.

val is_symmetric : ('a, 'b) t -> bool

is_symmetric x returns true if x is symmetric otherwise false.

val is_hermitian : (Stdlib.Complex.t, 'a) t -> bool

is_hermitian x returns true if x is hermitian otherwise false.

val is_diag : ('a, 'b) t -> bool

is_diag x returns true if x is diagonal otherwise false.

val is_posdef : ('a, 'b) t -> bool

is_posdef x checks whether x is a positive semi-definite matrix.

Factorisation
val lu : ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t * (int32, Stdlib.Bigarray.int32_elt) t

lu x -> (l, u, ipiv) calculates LU decomposition of x. The pivoting is used by default.
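
A minimal sketch (assumed usage, not from the original text):

.. code-block:: ocaml

  (* Sketch: LU factorisation of a random square matrix.
     ipiv records the row pivoting applied during factorisation. *)
  let () =
    let open Owl in
    let x = Mat.uniform 4 4 in
    let l, u, _ipiv = Linalg.D.lu x in
    Mat.print l;
    Mat.print u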

val lq : ?thin:bool -> ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t

lq x -> (l, q) calculates the LQ decomposition of x. By default, the reduced LQ decomposition is performed. But you can get full Q by setting parameter thin = false.

val qr : ?thin:bool -> ?pivot:bool -> ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t * (int32, Stdlib.Bigarray.int32_elt) t

qr x calculates QR decomposition for an m by n matrix x as x = Q R. Q is an m by n matrix (where Q^T Q = I) and R is an n by n upper-triangular matrix.

The function returns a 3-tuple; the first two are q and r, and the third is the permutation vector of columns. The default value of pivot is false; setting pivot = true makes qr perform pivoted factorisation. Note that the returned indices are not adjusted to 0-based C layout.

By default, qr performs a reduced QR factorisation; full factorisation can be enabled by setting the thin parameter to false.
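
For example, a minimal sketch (not from the original docs) of the reduced factorisation of a tall matrix, checked with the =~ operator:

.. code-block:: ocaml

  (* Sketch: thin QR of a 6 x 3 matrix; q *@ r reproduces x. *)
  let () =
    let open Owl in
    let x = Mat.uniform 6 3 in
    let q, r, _ = Linalg.D.qr x in
    assert Mat.(q *@ r =~ x)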

val chol : ?upper:bool -> ('a, 'b) t -> ('a, 'b) t

chol x -> u calculates the Cholesky factorisation of a positive definite matrix x such that x = u' *@ u. By default, the upper triangular matrix is returned. The lower triangular part can be obtained by setting the parameter upper = false.

val svd : ?thin:bool -> ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t * ('a, 'b) t

svd x -> (u, s, vt) calculates the singular value decomposition of x, and returns a 3-tuple (u,s,vt). By default, a reduced svd is performed: e.g., for an m x n matrix x where m <= n, u is returned as an m by m orthogonal matrix, s as a 1 by m row vector of singular values, and vt as the transpose of an n by m orthogonal rectangular matrix.

The full svd can be performed by setting thin = false. Note that for complex inputs, the returned singular values are also of complex type, with zero imaginary part.
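
A small sketch (assumed usage) that only inspects the shapes of the thin-SVD factors:

.. code-block:: ocaml

  (* Sketch: shapes of the factors returned by the reduced SVD. *)
  let () =
    let open Owl in
    let x = Mat.uniform 5 3 in
    let u, s, vt = Linalg.D.svd x in
    Printf.printf "u:%dx%d s:%dx%d vt:%dx%d\n"
      (Mat.row_num u) (Mat.col_num u)
      (Mat.row_num s) (Mat.col_num s)
      (Mat.row_num vt) (Mat.col_num vt)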

val svdvals : ('a, 'b) t -> ('a, 'b) t

svdvals x -> s performs the singular value decomposition of x like svd x, but the function only returns the singular values without u and vt. Note that for complex inputs, the return is also of complex type.

val gsvd : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t * ('a, 'b) t * ('a, 'b) t * ('a, 'b) t * ('a, 'b) t

gsvd x y -> (u, v, q, d1, d2, r) computes the generalised singular value decomposition of a pair of general rectangular matrices x and y. d1 and d2 contain the generalised singular value pairs of x and y. The shape of x is m x n and the shape of y is p x n.

.. code-block:: ocaml

  let x = Mat.uniform 5 5;;
  let y = Mat.uniform 2 5;;
  let u, v, q, d1, d2, r = Linalg.gsvd x y;;
  Mat.(u *@ d1 *@ r *@ transpose q =~ x);;
  Mat.(v *@ d2 *@ r *@ transpose q =~ y);;

Please refer to: `Intel MKL Reference <https://software.intel.com/en-us/mkl-developer-reference-c-ggsvd3>`_

val gsvdvals : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

gsvdvals x y is similar to gsvd x y but only returns the singular values of the generalised singular value decomposition of x and y.

val schur : otyp:('c, 'd) Stdlib.Bigarray.kind -> ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t * ('c, 'd) t

schur x -> (t, z, w) calculates Schur factorisation of x in the following form.

.. math:: X = Z T Z^H

Parameters: * otyp: the complex type of eigen values. * x: the n x n square matrix.

Returns: * t is (quasi) triangular Schur factor. * z is orthogonal/unitary Schur vectors. The eigen values are not sorted, they have the same order as that they appear on the diagonal of the output of Schur form t. * w contains the eigen values of x. otyp is used to specify the type of w. It needs to be consistent with input type. E.g., if the input x is float32 then otyp must be complex32. However, if you use S, D, C, Z module, then you do not need to worry about otyp.

val schur_tz : ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t

schur_tz x is similar to schur but only returns (t, z).

val ordschur : otyp:('c, 'd) Stdlib.Bigarray.kind -> select:(int32, Stdlib.Bigarray.int32_elt) t -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t * ('c, 'd) t

ordschur ~select t z -> (r, p) reorders t and z returned by the Schur factorisation schur x -> (t, z) according to select such that

.. math:: X = P R P^H

Parameters: * otyp: the complex type of eigen values * select the logical vector to select eigenvalues, refer to select_ev. * t: the Schur matrix returned by schur x. * z: the unitary matrix z returned by schur x.

Returns: * r: reordered Schur matrix t. * p: reordered orthogonal matrix z.

val qz : otyp:('c, 'd) Stdlib.Bigarray.kind -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t * ('a, 'b) t * ('a, 'b) t * ('c, 'd) t

qz x -> (s, t, q, z, w) calculates generalised Schur factorisation of x in the following form. It is also known as QZ decomposition.

.. math:: X = Q S Z^H, \quad Y = Q T Z^H

Parameters: * otyp: the complex type of eigen values. * x: the n x n square matrix. * y: the n x n square matrix.

Returns: * s: the upper quasitriangular matrices S. * t: the upper quasitriangular matrices T. * q: the unitary matrices Q. * z: the unitary matrices Z. * w: the generalised eigenvalue for a pair of matrices (X,Y).

val ordqz : otyp:('c, 'd) Stdlib.Bigarray.kind -> select:(int32, Stdlib.Bigarray.int32_elt) t -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t * ('a, 'b) t * ('a, 'b) t * ('c, 'd) t

ordqz ~select a b q z reorders the generalised Schur decomposition of a pair of matrices (X,Y) so that a selected cluster of eigenvalues appears in the leading diagonal blocks of (X,Y).

val qzvals : otyp:('c, 'd) Stdlib.Bigarray.kind -> ('a, 'b) t -> ('a, 'b) t -> ('c, 'd) t

qzvals ~otyp x y is similar to qz ~otyp x y but only returns the generalised eigen values.

val hess : ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t

hess x -> (h, q) calculates the Hessenberg form of a given matrix x. Both the Hessenberg matrix h and the unitary matrix q are returned, such that x = q *@ h *@ (transpose q).

.. math:: X = Q H Q^T

Eigenvalues & eigenvectors
val eig : ?permute:bool -> ?scale:bool -> otyp:('a, 'b) Stdlib.Bigarray.kind -> ('c, 'd) t -> ('a, 'b) t * ('a, 'b) t

eig x -> v, w computes the right eigenvectors v and eigenvalues w of an arbitrary square matrix x. The eigenvectors are column vectors in v, their corresponding eigenvalues have the same order in w as that in v.

Note that otyp specifies the complex type of the output, but you do not need to worry about this parameter if you use the S, D, C, Z modules in Linalg.
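
A minimal sketch (not from the original docs) using the D submodule, where the results come back as complex matrices:

.. code-block:: ocaml

  (* Sketch: eigenvalues of a real square matrix via Linalg.D.eig. *)
  let () =
    let open Owl in
    let x = Mat.uniform 4 4 in
    let _v, w = Linalg.D.eig x in
    Dense.Matrix.Z.print w   (* a complex matrix holding the eigenvalues *)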

val eigvals : ?permute:bool -> ?scale:bool -> otyp:('a, 'b) Stdlib.Bigarray.kind -> ('c, 'd) t -> ('a, 'b) t

eigvals x -> w is similar to eig but only computes the eigenvalues of an arbitrary square matrix x.

Linear system of equations
val null : ('a, 'b) t -> ('a, 'b) t

null a -> x computes an orthonormal basis x for the null space of a obtained from the singular value decomposition. Namely, a *@ x has negligible elements, M.col_num x is the nullity of a, and transpose x *@ x = I. Namely,

.. math:: X^T X = I

val triangular_solve : upper:bool -> ?trans:bool -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

triangular_solve a b -> x solves a linear system of equations a * x = b where a is either an upper or a lower triangular matrix. This function uses the cblas trsm routine under the hood.

.. math:: AX = B

By default, trans = false indicates no transpose. If trans = true, then the function will solve A^T * x = b for real matrices and A^H * x = b for complex matrices.

.. math:: A^H X = B

val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

linsolve a b -> x solves a linear system of equations a * x = b in the following form. By default, typ=`n and the function uses LU factorisation with partial pivoting when a is square, and QR factorisation with column pivoting otherwise. The number of rows of a must equal the number of rows of b. If a is an upper (lower) triangular matrix, the function calls the triangular_solve function when typ=`u (typ=`l).

.. math:: AX = B

By default, trans = false indicates no transpose. If trans = true, then the function will solve A^T * x = b for real matrices and A^H * x = b for complex matrices.

.. math:: A^H X = B

The associated operator is /@, so you can simply use a /@ b to solve the linear equation system to get x. Please refer to :doc:`owl_operator`.
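
As a sketch (assumed usage, not from the original page), solving a small square system and checking the residual:

.. code-block:: ocaml

  (* Sketch: solve a x = b and verify a *@ x is close to b. *)
  let () =
    let open Owl in
    let a = Mat.uniform 4 4 in
    let b = Mat.uniform 4 1 in
    let x = Linalg.D.linsolve a b in
    assert Mat.(a *@ x =~ b)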

val linreg : ('a, 'b) t -> ('a, 'b) t -> 'a * 'a

linreg x y -> (a, b) solves y = a + b*x using Ordinary Least Squares.

.. math:: Y = A + BX
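
A short sketch (not from the original text; the column-vector input shapes are an assumption) fitting a noisy line:

.. code-block:: ocaml

  (* Sketch: ordinary least squares fit of y = 2 + 3 * x plus small noise. *)
  let () =
    let open Owl in
    let x = Mat.uniform 100 1 in
    let y = Mat.(2. $+ (3. $* x) + (0.01 $* uniform 100 1)) in
    let a, b = Linalg.D.linreg x y in
    Printf.printf "intercept=%g slope=%g\n" a b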

val sylvester : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

sylvester a b c solves a Sylvester equation in the following form. The function calls the LAPACKE function trsyl to solve the system.

.. math:: AX + XB = C

Parameters: * a : m x m matrix A. * b : n x n matrix B. * c : m x n matrix C.

Returns: * x : m x n matrix X.

val lyapunov : ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

lyapunov a q solves a continuous Lyapunov equation in the following form. The function calls the LAPACKE function trsyl to solve the system. In Matlab, the same function is called lyap.

.. math:: AX + XA^H = Q

Parameters: * a : m x m matrix A. * q : m x m matrix Q.

Returns: * x : m x m matrix X.

val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> ('a, 'b) t -> ('a, 'b) t -> ('a, 'b) t

discrete_lyapunov a q solves a discrete-time Lyapunov equation in the following form.

.. math:: X - AXA^H = Q

Parameters: * a : m x m matrix A. * q : m x m matrix Q.

Returns: * x : m x m matrix X.

val care : ?diag_r:bool -> (float, 'a) t -> (float, 'a) t -> (float, 'a) t -> (float, 'a) t -> (float, 'a) t

care ?diag_r a b q r solves the continuous-time algebraic Riccati equation system in the following form. The algorithm is based on :cite:`laub1979schur`.

.. math:: A^T X + X A - X B R^{-1} B^T X + Q = 0

Parameters: * a : real coefficient matrix A. * b : real coefficient matrix B. * q : real coefficient matrix Q. * r : real coefficient matrix R. R must be non-singular. * diag_r : true if R is a diagonal matrix, false by default.

Returns: * x : a solution matrix X.

val dare : ?diag_r:bool -> (float, 'a) t -> (float, 'a) t -> (float, 'a) t -> (float, 'a) t -> (float, 'a) t

dare ?diag_r a b q r solves the discrete-time algebraic Riccati equation system in the following form. The algorithm is based on :cite:`laub1979schur`.

.. math:: A^T X A - X - (A^T X B) (B^T X B + R)^{-1} (B^T X A) + Q = 0

Parameters: * a : real coefficient matrix A. A must be non-singular. * b : real coefficient matrix B. * q : real coefficient matrix Q. * r : real coefficient matrix R. R must be non-singular. * diag_r : true if R is a diagonal matrix, false by default.

Returns: * x : a symmetric solution matrix X.

Low-level factorisation functions
val lufact : ('a, 'b) t -> ('a, 'b) t * (int32, Stdlib.Bigarray.int32_elt) t

lufact x -> (a, ipiv) calculates LU factorisation with pivot of a general matrix x.

val qrfact : ?pivot:bool -> ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t * (int32, Stdlib.Bigarray.int32_elt) t

qrfact x -> (a, tau, jpvt) calculates QR factorisation of a general matrix x.

val bkfact : ?upper:bool -> ?symmetric:bool -> ?rook:bool -> ('a, 'b) t -> ('a, 'b) t * (int32, Stdlib.Bigarray.int32_elt) t

bkfact x -> (a, ipiv) calculates the Bunch-Kaufman factorisation of x. If symmetric = true then x is symmetric; if symmetric = false then x is hermitian. If rook = true the function performs the bounded Bunch-Kaufman ("rook") diagonal pivoting method; if rook = false the Bunch-Kaufman diagonal pivoting method is used. a contains details of the block-diagonal matrix d and the multipliers used to obtain the factor u (or l).

The upper indicates whether the upper or lower triangular part of x is stored and how x is factored. If upper = true then upper triangular part is stored: x = u*d*u' else x = l*d*l'.

For ipiv, it indicates the details of the interchanges and the block structure of d. Please refer to the function sytrf, hetrf in MKL documentation for more details.

Matrix functions
val mpow : ('a, 'b) t -> float -> ('a, 'b) t

mpow x r returns the dot product of square matrix x with itself r times, and more generally raises the matrix to the rth power. r is a float that must be equal to an integer; it can be negative, zero, or positive. Non-integer exponents are not yet implemented. (If r is negative, mpow calls inv, and the warnings in the documentation for inv apply.)
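
A minimal sketch (assumed usage) comparing mpow against explicit repeated multiplication:

.. code-block:: ocaml

  (* Sketch: x^3 via mpow versus x *@ x *@ x; the difference should be tiny. *)
  let () =
    let open Owl in
    let x = Mat.uniform 3 3 in
    let x3 = Linalg.D.mpow x 3. in
    Printf.printf "max abs diff = %g\n" Mat.(max' (abs (x3 - (x *@ x *@ x))))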

val expm : ('a, 'b) t -> ('a, 'b) t

expm x computes the matrix exponential of x defined by

.. math:: e^x = \sum_{k=0}^{\infty} \frac{1}{k!} x^k

The function implements the scaling and squaring algorithm which uses Padé approximation to compute the matrix exponential :cite:`al2009new`.
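
A tiny sketch (not from the original docs): the exponential of the zero matrix is the identity.

.. code-block:: ocaml

  (* Sketch: expm 0 = I. *)
  let () =
    let open Owl in
    assert Mat.(Linalg.D.expm (zeros 3 3) =~ eye 3)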

val sinm : ('a, 'b) t -> ('a, 'b) t

sinm x computes the matrix sine of input x. The function uses expm to compute the matrix exponentials.

val cosm : ('a, 'b) t -> ('a, 'b) t

cosm x computes the matrix cosine of input x. The function uses expm to compute the matrix exponentials.

val tanm : ('a, 'b) t -> ('a, 'b) t

tanm x computes the matrix tangent of input x. The function uses expm to compute the matrix exponentials.

val sincosm : ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t

sincosm x returns both matrix sine and cosine of x.

val sinhm : ('a, 'b) t -> ('a, 'b) t

sinhm x computes the hyperbolic matrix sine of input x. The function uses expm to compute the matrix exponentials.

val coshm : ('a, 'b) t -> ('a, 'b) t

coshm x computes the hyperbolic matrix cosine of input x. The function uses expm to compute the matrix exponentials.

val tanhm : ('a, 'b) t -> ('a, 'b) t

tanhm x computes the hyperbolic matrix tangent of input x. The function uses expm to compute the matrix exponentials.

val sinhcoshm : ('a, 'b) t -> ('a, 'b) t * ('a, 'b) t

sinhcoshm x returns both hyperbolic matrix sine and cosine of x.

Helper functions
val select_ev : [ `LHP | `RHP | `UDI | `UDO ] -> ('a, 'b) t -> (int32, Stdlib.Bigarray.int32_elt) t

select_ev keyword ev generates a logical vector (of the same shape as ev) from the eigen values ev according to the passed-in keyword.

  • LHP: Left-half plane :math:`(real(e) < 0)`.
  • RHP: Right-half plane :math:`(real(e) \ge 0)`.
  • UDI: Unit disk, interior :math:`(abs(e) < 1)`.
  • UDO: Unit disk, outside :math:`(abs(e) \ge 1)`.
val peakflops : ?n:int -> unit -> float

peakflops () returns the peak number of floating-point operations measured using the Owl_cblas_basic.dgemm function. The default matrix size is 2000 x 2000, but you can change this by setting n to another value.
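
A one-line sketch (assumed usage):

.. code-block:: ocaml

  (* Sketch: report the measured peak dgemm throughput. *)
  let () =
    Printf.printf "peak flops: %g\n" (Owl.Linalg.D.peakflops ())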

\ No newline at end of file
diff --git a/owl/Owl_linalg/S/index.html b/owl/Owl_linalg/S/index.html
deleted file mode 100644
index c720b4ef9..000000000
--- a/owl/Owl_linalg/S/index.html
+++ /dev/null
@@ -1,25 +0,0 @@
S (owl.Owl_linalg.S)

Module Owl_linalg.S

include module type of struct include Owl_linalg_s end
type elt = float
type complex_mat = Owl_dense_matrix_c.mat
type int32_mat = (int32, Stdlib.Bigarray.int32_elt) Owl_dense_matrix_generic.t
include Owl_linalg_intf.Common with type elt := elt and type mat := mat and type complex_mat := complex_mat and type int32_mat := int32_mat
include Owl_base_linalg_intf.Common with type elt := elt with type mat := mat with type complex_mat := complex_mat with type int32_mat := int32_mat
Basic functions
val inv : mat -> mat
val det : mat -> elt
val logdet : mat -> elt
val is_triu : mat -> bool
val is_tril : mat -> bool
val is_symmetric : mat -> bool
val is_diag : mat -> bool
Factorisation
val svd : ?thin:bool -> mat -> mat * mat * mat
val chol : ?upper:bool -> mat -> mat
val qr : ?thin:bool -> ?pivot:bool -> mat -> mat * mat * int32_mat
val lq : ?thin:bool -> mat -> mat * mat
Linear system of equations
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> mat -> mat -> mat
val sylvester : mat -> mat -> mat -> mat
val lyapunov : mat -> mat -> mat
val discrete_lyapunov : - ?solver:[ `default | `direct | `bilinear ] -> - mat -> - mat -> - mat
Basic functions
val pinv : ?tol:float -> mat -> mat
val rank : ?tol:float -> mat -> int
val norm : ?p:float -> mat -> float
val vecnorm : ?p:float -> mat -> float
val cond : ?p:float -> mat -> float
val rcond : mat -> float
val is_posdef : mat -> bool
Factorisation
val lu : mat -> mat * mat * int32_mat
val svdvals : mat -> mat
val gsvd : mat -> mat -> mat * mat * mat * mat * mat * mat
val gsvdvals : mat -> mat -> mat
val schur : mat -> mat * mat * complex_mat
val schur_tz : mat -> mat * mat
val ordschur : select:int32_mat -> mat -> mat -> mat * mat * complex_mat
val qz : mat -> mat -> mat * mat * mat * mat * complex_mat
val ordqz : - select:int32_mat -> - mat -> - mat -> - mat -> - mat -> - mat * mat * mat * mat * complex_mat
val qzvals : mat -> mat -> complex_mat
val hess : mat -> mat * mat
Eigenvalues & eigenvectors
val eig : ?permute:bool -> ?scale:bool -> mat -> complex_mat * complex_mat
val eigvals : ?permute:bool -> ?scale:bool -> mat -> complex_mat
Linear system of equations
val null : mat -> mat
val triangular_solve : upper:bool -> ?trans:bool -> mat -> mat -> mat
val linreg : mat -> mat -> elt * elt
Low-level factorisation functions
val lufact : mat -> mat * int32_mat
val qrfact : ?pivot:bool -> mat -> mat * mat * int32_mat
val bkfact : - ?upper:bool -> - ?symmetric:bool -> - ?rook:bool -> - mat -> - mat * int32_mat
Matrix functions
val mpow : mat -> float -> mat
val expm : mat -> mat
val sinm : mat -> mat
val cosm : mat -> mat
val tanm : mat -> mat
val sincosm : mat -> mat * mat
val sinhm : mat -> mat
val coshm : mat -> mat
val tanhm : mat -> mat
val sinhcoshm : mat -> mat * mat
Helper functions
val select_ev : [ `LHP | `RHP | `UDI | `UDO ] -> mat -> int32_mat
val peakflops : ?n:int -> unit -> float
include Owl_linalg_intf.Real with type mat := mat and type elt := elt
include Owl_base_linalg_intf.Real with type mat := mat with type elt := elt
val care : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
val dare : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
\ No newline at end of file diff --git a/owl/Owl_linalg/Z/index.html b/owl/Owl_linalg/Z/index.html deleted file mode 100644 index d03b9b321..000000000 --- a/owl/Owl_linalg/Z/index.html +++ /dev/null @@ -1,25 +0,0 @@ - -Z (owl.Owl_linalg.Z)

Module Owl_linalg.Z

include module type of struct include Owl_linalg_z end
type elt = Stdlib.Complex.t
type int32_mat = (int32, Stdlib.Bigarray.int32_elt) Owl_dense_matrix_generic.t
include Owl_linalg_intf.Common - with type elt := elt - and type mat := mat - and type complex_mat = mat - and type int32_mat := int32_mat
include Owl_base_linalg_intf.Common - with type elt := elt - with type mat := mat - with type complex_mat = mat - with type int32_mat := int32_mat
type complex_mat = mat
Basic functions
val inv : mat -> mat
val det : mat -> elt
val logdet : mat -> elt
val is_triu : mat -> bool
val is_tril : mat -> bool
val is_symmetric : mat -> bool
val is_diag : mat -> bool
Factorisation
val svd : ?thin:bool -> mat -> mat * mat * mat
val chol : ?upper:bool -> mat -> mat
val qr : ?thin:bool -> ?pivot:bool -> mat -> mat * mat * int32_mat
val lq : ?thin:bool -> mat -> mat * mat
Linear system of equations
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> mat -> mat -> mat
val sylvester : mat -> mat -> mat -> mat
val lyapunov : mat -> mat -> mat
val discrete_lyapunov : - ?solver:[ `default | `direct | `bilinear ] -> - mat -> - mat -> - mat
Basic functions
val pinv : ?tol:float -> mat -> mat
val rank : ?tol:float -> mat -> int
val norm : ?p:float -> mat -> float
val vecnorm : ?p:float -> mat -> float
val cond : ?p:float -> mat -> float
val rcond : mat -> float
val is_posdef : mat -> bool
Factorisation
val lu : mat -> mat * mat * int32_mat
val svdvals : mat -> mat
val gsvd : mat -> mat -> mat * mat * mat * mat * mat * mat
val gsvdvals : mat -> mat -> mat
val schur : mat -> mat * mat * complex_mat
val schur_tz : mat -> mat * mat
val ordschur : select:int32_mat -> mat -> mat -> mat * mat * complex_mat
val qz : mat -> mat -> mat * mat * mat * mat * complex_mat
val ordqz : - select:int32_mat -> - mat -> - mat -> - mat -> - mat -> - mat * mat * mat * mat * complex_mat
val qzvals : mat -> mat -> complex_mat
val hess : mat -> mat * mat
Eigenvalues & eigenvectors
val eig : ?permute:bool -> ?scale:bool -> mat -> complex_mat * complex_mat
val eigvals : ?permute:bool -> ?scale:bool -> mat -> complex_mat
Linear system of equations
val null : mat -> mat
val triangular_solve : upper:bool -> ?trans:bool -> mat -> mat -> mat
val linreg : mat -> mat -> elt * elt
Low-level factorisation functions
val lufact : mat -> mat * int32_mat
val qrfact : ?pivot:bool -> mat -> mat * mat * int32_mat
val bkfact : - ?upper:bool -> - ?symmetric:bool -> - ?rook:bool -> - mat -> - mat * int32_mat
Matrix functions
val mpow : mat -> float -> mat
val expm : mat -> mat
val sinm : mat -> mat
val cosm : mat -> mat
val tanm : mat -> mat
val sincosm : mat -> mat * mat
val sinhm : mat -> mat
val coshm : mat -> mat
val tanhm : mat -> mat
val sinhcoshm : mat -> mat * mat
Helper functions
val select_ev : [ `LHP | `RHP | `UDI | `UDO ] -> mat -> int32_mat
val peakflops : ?n:int -> unit -> float
\ No newline at end of file diff --git a/owl/Owl_linalg_c/.dummy b/owl/Owl_linalg_c/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_linalg_d/.dummy b/owl/Owl_linalg_d/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_linalg_generic/.dummy b/owl/Owl_linalg_generic/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_linalg_intf/.dummy b/owl/Owl_linalg_intf/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_linalg_intf/module-type-Common/index.html b/owl/Owl_linalg_intf/module-type-Common/index.html deleted file mode 100644 index 2e2eaefad..000000000 --- a/owl/Owl_linalg_intf/module-type-Common/index.html +++ /dev/null @@ -1,17 +0,0 @@ - -Common (owl.Owl_linalg_intf.Common)

Module type Owl_linalg_intf.Common

include Owl_base_linalg_intf.Common
type elt
type mat
type complex_mat
type int32_mat
Basic functions
val inv : mat -> mat
val det : mat -> elt
val logdet : mat -> elt
val is_triu : mat -> bool
val is_tril : mat -> bool
val is_symmetric : mat -> bool
val is_diag : mat -> bool
Factorisation
val svd : ?thin:bool -> mat -> mat * mat * mat
val chol : ?upper:bool -> mat -> mat
val qr : ?thin:bool -> ?pivot:bool -> mat -> mat * mat * int32_mat
val lq : ?thin:bool -> mat -> mat * mat
Linear system of equations
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> mat -> mat -> mat
val sylvester : mat -> mat -> mat -> mat
val lyapunov : mat -> mat -> mat
val discrete_lyapunov : - ?solver:[ `default | `direct | `bilinear ] -> - mat -> - mat -> - mat
Basic functions
val pinv : ?tol:float -> mat -> mat
val rank : ?tol:float -> mat -> int
val norm : ?p:float -> mat -> float
val vecnorm : ?p:float -> mat -> float
val cond : ?p:float -> mat -> float
val rcond : mat -> float
val is_posdef : mat -> bool
Factorisation
val lu : mat -> mat * mat * int32_mat
val svdvals : mat -> mat
val gsvd : mat -> mat -> mat * mat * mat * mat * mat * mat
val gsvdvals : mat -> mat -> mat
val schur : mat -> mat * mat * complex_mat
val schur_tz : mat -> mat * mat
val ordschur : select:int32_mat -> mat -> mat -> mat * mat * complex_mat
val qz : mat -> mat -> mat * mat * mat * mat * complex_mat
val ordqz : - select:int32_mat -> - mat -> - mat -> - mat -> - mat -> - mat * mat * mat * mat * complex_mat
val qzvals : mat -> mat -> complex_mat
val hess : mat -> mat * mat
Eigenvalues & eigenvectors
val eig : ?permute:bool -> ?scale:bool -> mat -> complex_mat * complex_mat
val eigvals : ?permute:bool -> ?scale:bool -> mat -> complex_mat
Linear system of equations
val null : mat -> mat
val triangular_solve : upper:bool -> ?trans:bool -> mat -> mat -> mat
val linreg : mat -> mat -> elt * elt
Low-level factorisation functions
val lufact : mat -> mat * int32_mat
val qrfact : ?pivot:bool -> mat -> mat * mat * int32_mat
val bkfact : - ?upper:bool -> - ?symmetric:bool -> - ?rook:bool -> - mat -> - mat * int32_mat
Matrix functions
val mpow : mat -> float -> mat
val expm : mat -> mat
val sinm : mat -> mat
val cosm : mat -> mat
val tanm : mat -> mat
val sincosm : mat -> mat * mat
val sinhm : mat -> mat
val coshm : mat -> mat
val tanhm : mat -> mat
val sinhcoshm : mat -> mat * mat
Helper functions
val select_ev : [ `LHP | `RHP | `UDI | `UDO ] -> mat -> int32_mat
val peakflops : ?n:int -> unit -> float
\ No newline at end of file diff --git a/owl/Owl_linalg_intf/module-type-Real/index.html b/owl/Owl_linalg_intf/module-type-Real/index.html deleted file mode 100644 index c1dec688e..000000000 --- a/owl/Owl_linalg_intf/module-type-Real/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Real (owl.Owl_linalg_intf.Real)

Module type Owl_linalg_intf.Real

include Owl_base_linalg_intf.Real
type elt
type mat
val care : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
val dare : ?diag_r:bool -> mat -> mat -> mat -> mat -> mat
\ No newline at end of file diff --git a/owl/Owl_linalg_s/.dummy b/owl/Owl_linalg_s/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_linalg_z/.dummy b/owl/Owl_linalg_z/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_maths/.dummy b/owl/Owl_maths/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_maths_special/.dummy b/owl/Owl_maths_special/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_matrix/.dummy b/owl/Owl_matrix/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_matrix_check/.dummy b/owl/Owl_matrix_check/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_matrix_swap/.dummy b/owl/Owl_matrix_swap/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_ndarray/.dummy b/owl/Owl_ndarray/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_ndarray_contract/.dummy b/owl/Owl_ndarray_contract/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_ndarray_conv/.dummy b/owl/Owl_ndarray_conv/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_ndarray_fma/.dummy b/owl/Owl_ndarray_fma/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_ndarray_maths/.dummy b/owl/Owl_ndarray_maths/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_ndarray_pool/.dummy b/owl/Owl_ndarray_pool/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_ndarray_repeat/.dummy b/owl/Owl_ndarray_repeat/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_ndarray_slide/.dummy b/owl/Owl_ndarray_slide/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_ndarray_sort/.dummy b/owl/Owl_ndarray_sort/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_ndarray_transpose/.dummy b/owl/Owl_ndarray_transpose/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_ndarray_upsampling/.dummy b/owl/Owl_ndarray_upsampling/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_ndarray_utils/.dummy b/owl/Owl_ndarray_utils/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_neural/.dummy b/owl/Owl_neural/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_neural/D/Graph/Neuron/Activation/index.html b/owl/Owl_neural/D/Graph/Neuron/Activation/index.html deleted file mode 100644 index 3ba9f17e5..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Activation/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Activation (owl.Owl_neural.D.Graph.Neuron.Activation)

Module Neuron.Activation

type typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Activation.typ = -
  1. | Elu
  2. | Relu
  3. | Sigmoid
  4. | HardSigmoid
  5. | Softmax of int
  6. | Softplus
  7. | Softsign
  8. | Tanh
  9. | Relu6
  10. | LeakyRelu of float
  11. | TRelu of float
  12. | Custom of Optimise.Algodiff.t -> Optimise.Algodiff.t
  13. | None
type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Activation.neuron_typ = - {
  1. mutable activation : typ;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val run_activation : Optimise.Algodiff.t -> typ -> Optimise.Algodiff.t
val copy : neuron_typ -> neuron_typ
val activation_to_string : typ -> string
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Add/index.html b/owl/Owl_neural/D/Graph/Neuron/Add/index.html deleted file mode 100644 index a1ca092dc..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Add/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Add (owl.Owl_neural.D.Graph.Neuron.Add)

Module Neuron.Add

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Add.neuron_typ = - {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/AlphaDropout/index.html b/owl/Owl_neural/D/Graph/Neuron/AlphaDropout/index.html deleted file mode 100644 index 71441cf42..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/AlphaDropout/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -AlphaDropout (owl.Owl_neural.D.Graph.Neuron.AlphaDropout)

Module Neuron.AlphaDropout

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.AlphaDropout.neuron_typ = - {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Average/index.html b/owl/Owl_neural/D/Graph/Neuron/Average/index.html deleted file mode 100644 index ba213392d..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Average/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Average (owl.Owl_neural.D.Graph.Neuron.Average)

Module Neuron.Average

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Average.neuron_typ = - {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/AvgPool1D/index.html b/owl/Owl_neural/D/Graph/Neuron/AvgPool1D/index.html deleted file mode 100644 index fbe3e80fe..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/AvgPool1D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -AvgPool1D (owl.Owl_neural.D.Graph.Neuron.AvgPool1D)

Module Neuron.AvgPool1D

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.AvgPool1D.neuron_typ = - {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/AvgPool2D/index.html b/owl/Owl_neural/D/Graph/Neuron/AvgPool2D/index.html deleted file mode 100644 index a7e39f9b7..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/AvgPool2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -AvgPool2D (owl.Owl_neural.D.Graph.Neuron.AvgPool2D)

Module Neuron.AvgPool2D

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.AvgPool2D.neuron_typ = - {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Concatenate/index.html b/owl/Owl_neural/D/Graph/Neuron/Concatenate/index.html deleted file mode 100644 index 5599800b7..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Concatenate/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Concatenate (owl.Owl_neural.D.Graph.Neuron.Concatenate)

Module Neuron.Concatenate

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Concatenate.neuron_typ = - {
  1. mutable axis : int;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Conv1D/index.html b/owl/Owl_neural/D/Graph/Neuron/Conv1D/index.html deleted file mode 100644 index a79069947..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Conv1D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -Conv1D (owl.Owl_neural.D.Graph.Neuron.Conv1D)

Module Neuron.Conv1D

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Conv1D.neuron_typ = - {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Conv2D/index.html b/owl/Owl_neural/D/Graph/Neuron/Conv2D/index.html deleted file mode 100644 index 25cbb854a..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Conv2D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -Conv2D (owl.Owl_neural.D.Graph.Neuron.Conv2D)

Module Neuron.Conv2D

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Conv2D.neuron_typ = - {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Conv3D/index.html b/owl/Owl_neural/D/Graph/Neuron/Conv3D/index.html deleted file mode 100644 index 6a2631f7a..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Conv3D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -Conv3D (owl.Owl_neural.D.Graph.Neuron.Conv3D)

Module Neuron.Conv3D

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Conv3D.neuron_typ = - {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - Init.typ -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/DilatedConv1D/index.html b/owl/Owl_neural/D/Graph/Neuron/DilatedConv1D/index.html deleted file mode 100644 index 5895a8730..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/DilatedConv1D/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -DilatedConv1D (owl.Owl_neural.D.Graph.Neuron.DilatedConv1D)

Module Neuron.DilatedConv1D

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.DilatedConv1D.neuron_typ = - {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - int array -> - Init.typ -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/DilatedConv2D/index.html b/owl/Owl_neural/D/Graph/Neuron/DilatedConv2D/index.html deleted file mode 100644 index 7ff90156a..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/DilatedConv2D/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -DilatedConv2D (owl.Owl_neural.D.Graph.Neuron.DilatedConv2D)

Module Neuron.DilatedConv2D

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.DilatedConv2D.neuron_typ = - {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - int array -> - Init.typ -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/DilatedConv3D/index.html b/owl/Owl_neural/D/Graph/Neuron/DilatedConv3D/index.html deleted file mode 100644 index 2fca0dc79..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/DilatedConv3D/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -DilatedConv3D (owl.Owl_neural.D.Graph.Neuron.DilatedConv3D)

Module Neuron.DilatedConv3D

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.DilatedConv3D.neuron_typ = - {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : - ?inputs:int array -> - Owl_types.padding -> - int array -> - int array -> - int array -> - Init.typ -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Dot/index.html b/owl/Owl_neural/D/Graph/Neuron/Dot/index.html deleted file mode 100644 index ed1cf667c..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Dot/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Dot (owl.Owl_neural.D.Graph.Neuron.Dot)

Module Neuron.Dot

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Dot.neuron_typ = - {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Dropout/index.html b/owl/Owl_neural/D/Graph/Neuron/Dropout/index.html deleted file mode 100644 index 10feb6474..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Dropout/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Dropout (owl.Owl_neural.D.Graph.Neuron.Dropout)

Module Neuron.Dropout

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Dropout.neuron_typ = - {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Embedding/index.html b/owl/Owl_neural/D/Graph/Neuron/Embedding/index.html deleted file mode 100644 index 15a901035..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Embedding/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Embedding (owl.Owl_neural.D.Graph.Neuron.Embedding)

Module Neuron.Embedding

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Embedding.neuron_typ = - {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_dim : int;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Flatten/index.html b/owl/Owl_neural/D/Graph/Neuron/Flatten/index.html deleted file mode 100644 index 91acc23bc..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Flatten/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Flatten (owl.Owl_neural.D.Graph.Neuron.Flatten)

Module Neuron.Flatten

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Flatten.neuron_typ = - {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/FullyConnected/index.html b/owl/Owl_neural/D/Graph/Neuron/FullyConnected/index.html deleted file mode 100644 index 02b31db80..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/FullyConnected/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -FullyConnected (owl.Owl_neural.D.Graph.Neuron.FullyConnected)

Module Neuron.FullyConnected

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.FullyConnected.neuron_typ = - {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/GRU/index.html b/owl/Owl_neural/D/Graph/Neuron/GRU/index.html deleted file mode 100644 index 6a026572a..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/GRU/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GRU (owl.Owl_neural.D.Graph.Neuron.GRU)

Module Neuron.GRU

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.GRU.neuron_typ = - {
  1. mutable wxz : Optimise.Algodiff.t;
  2. mutable whz : Optimise.Algodiff.t;
  3. mutable wxr : Optimise.Algodiff.t;
  4. mutable whr : Optimise.Algodiff.t;
  5. mutable wxh : Optimise.Algodiff.t;
  6. mutable whh : Optimise.Algodiff.t;
  7. mutable bz : Optimise.Algodiff.t;
  8. mutable br : Optimise.Algodiff.t;
  9. mutable bh : Optimise.Algodiff.t;
  10. mutable h : Optimise.Algodiff.t;
  11. mutable init_typ : Init.typ;
  12. mutable in_shape : int array;
  13. mutable out_shape : int array;
}
val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/GaussianDropout/index.html b/owl/Owl_neural/D/Graph/Neuron/GaussianDropout/index.html deleted file mode 100644 index a6d875c41..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/GaussianDropout/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GaussianDropout (owl.Owl_neural.D.Graph.Neuron.GaussianDropout)

Module Neuron.GaussianDropout

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.GaussianDropout.neuron_typ = - {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/GaussianNoise/index.html b/owl/Owl_neural/D/Graph/Neuron/GaussianNoise/index.html deleted file mode 100644 index 4d1582cfc..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/GaussianNoise/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GaussianNoise (owl.Owl_neural.D.Graph.Neuron.GaussianNoise)

Module Neuron.GaussianNoise

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.GaussianNoise.neuron_typ = - {
  1. mutable sigma : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/GlobalAvgPool1D/index.html b/owl/Owl_neural/D/Graph/Neuron/GlobalAvgPool1D/index.html deleted file mode 100644 index a1bd45a07..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/GlobalAvgPool1D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GlobalAvgPool1D (owl.Owl_neural.D.Graph.Neuron.GlobalAvgPool1D)

Module Neuron.GlobalAvgPool1D

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.GlobalAvgPool1D.neuron_typ = - {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/GlobalAvgPool2D/index.html b/owl/Owl_neural/D/Graph/Neuron/GlobalAvgPool2D/index.html deleted file mode 100644 index e2504523b..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/GlobalAvgPool2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GlobalAvgPool2D (owl.Owl_neural.D.Graph.Neuron.GlobalAvgPool2D)

Module Neuron.GlobalAvgPool2D

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.GlobalAvgPool2D.neuron_typ = - {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/GlobalMaxPool1D/index.html b/owl/Owl_neural/D/Graph/Neuron/GlobalMaxPool1D/index.html deleted file mode 100644 index 84355e393..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/GlobalMaxPool1D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GlobalMaxPool1D (owl.Owl_neural.D.Graph.Neuron.GlobalMaxPool1D)

Module Neuron.GlobalMaxPool1D

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.GlobalMaxPool1D.neuron_typ = - {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/GlobalMaxPool2D/index.html b/owl/Owl_neural/D/Graph/Neuron/GlobalMaxPool2D/index.html deleted file mode 100644 index 679801f6f..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/GlobalMaxPool2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GlobalMaxPool2D (owl.Owl_neural.D.Graph.Neuron.GlobalMaxPool2D)

Module Neuron.GlobalMaxPool2D

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.GlobalMaxPool2D.neuron_typ = - {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Init/index.html b/owl/Owl_neural/D/Graph/Neuron/Init/index.html deleted file mode 100644 index 02c56ef81..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Init/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Init (owl.Owl_neural.D.Graph.Neuron.Init)

Module Neuron.Init

type typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Init.typ = -
  1. | Uniform of float * float
  2. | Gaussian of float * float
  3. | Standard
  4. | Tanh
  5. | GlorotNormal
  6. | GlorotUniform
  7. | LecunNormal
  8. | HeNormal
  9. | Custom of int array -> Optimise.Algodiff.t
val calc_fans : int array -> float * float
val run : typ -> int array -> Optimise.Algodiff.t -> Optimise.Algodiff.t
val to_string : typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Input/index.html b/owl/Owl_neural/D/Graph/Neuron/Input/index.html deleted file mode 100644 index 4a4136262..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Input/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Input (owl.Owl_neural.D.Graph.Neuron.Input)

Module Neuron.Input

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Input.neuron_typ = - {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : int array -> neuron_typ
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/LSTM/index.html b/owl/Owl_neural/D/Graph/Neuron/LSTM/index.html deleted file mode 100644 index c4c54236a..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/LSTM/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -LSTM (owl.Owl_neural.D.Graph.Neuron.LSTM)

Module Neuron.LSTM

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.LSTM.neuron_typ = - {
  1. mutable wxi : Optimise.Algodiff.t;
  2. mutable whi : Optimise.Algodiff.t;
  3. mutable wxc : Optimise.Algodiff.t;
  4. mutable whc : Optimise.Algodiff.t;
  5. mutable wxf : Optimise.Algodiff.t;
  6. mutable whf : Optimise.Algodiff.t;
  7. mutable wxo : Optimise.Algodiff.t;
  8. mutable who : Optimise.Algodiff.t;
  9. mutable bi : Optimise.Algodiff.t;
  10. mutable bc : Optimise.Algodiff.t;
  11. mutable bf : Optimise.Algodiff.t;
  12. mutable bo : Optimise.Algodiff.t;
  13. mutable c : Optimise.Algodiff.t;
  14. mutable h : Optimise.Algodiff.t;
  15. mutable init_typ : Init.typ;
  16. mutable in_shape : int array;
  17. mutable out_shape : int array;
}
val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Lambda/index.html b/owl/Owl_neural/D/Graph/Neuron/Lambda/index.html deleted file mode 100644 index 4c1a957a1..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Lambda/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Lambda (owl.Owl_neural.D.Graph.Neuron.Lambda)

Module Neuron.Lambda

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Lambda.neuron_typ = - {
  1. mutable lambda : Optimise.Algodiff.t -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : - ?out_shape:int array -> - (Optimise.Algodiff.t -> Optimise.Algodiff.t) -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/LambdaArray/index.html b/owl/Owl_neural/D/Graph/Neuron/LambdaArray/index.html deleted file mode 100644 index f51d199fb..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/LambdaArray/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -LambdaArray (owl.Owl_neural.D.Graph.Neuron.LambdaArray)

Module Neuron.LambdaArray

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.LambdaArray.neuron_typ = - {
  1. mutable lambda : Optimise.Algodiff.t array -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : - int array -> - (Optimise.Algodiff.t array -> Optimise.Algodiff.t) -> - neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Linear/index.html b/owl/Owl_neural/D/Graph/Neuron/Linear/index.html deleted file mode 100644 index 5982661ff..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Linear/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Linear (owl.Owl_neural.D.Graph.Neuron.Linear)

Module Neuron.Linear

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Linear.neuron_typ = - {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/LinearNoBias/index.html b/owl/Owl_neural/D/Graph/Neuron/LinearNoBias/index.html deleted file mode 100644 index aefef8fba..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/LinearNoBias/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -LinearNoBias (owl.Owl_neural.D.Graph.Neuron.LinearNoBias)

Module Neuron.LinearNoBias

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.LinearNoBias.neuron_typ = - {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_shape : int array;
  4. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Masking/index.html b/owl/Owl_neural/D/Graph/Neuron/Masking/index.html deleted file mode 100644 index b59af75b1..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Masking/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Masking (owl.Owl_neural.D.Graph.Neuron.Masking)

Module Neuron.Masking

\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Max/index.html b/owl/Owl_neural/D/Graph/Neuron/Max/index.html deleted file mode 100644 index fc92080f4..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Max/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Max (owl.Owl_neural.D.Graph.Neuron.Max)

Module Neuron.Max

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Max.neuron_typ = - {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/MaxPool1D/index.html b/owl/Owl_neural/D/Graph/Neuron/MaxPool1D/index.html deleted file mode 100644 index 051041bda..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/MaxPool1D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -MaxPool1D (owl.Owl_neural.D.Graph.Neuron.MaxPool1D)

Module Neuron.MaxPool1D

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.MaxPool1D.neuron_typ = - {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/MaxPool2D/index.html b/owl/Owl_neural/D/Graph/Neuron/MaxPool2D/index.html deleted file mode 100644 index 13cc7ac35..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/MaxPool2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -MaxPool2D (owl.Owl_neural.D.Graph.Neuron.MaxPool2D)

Module Neuron.MaxPool2D

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.MaxPool2D.neuron_typ = - {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Mul/index.html b/owl/Owl_neural/D/Graph/Neuron/Mul/index.html deleted file mode 100644 index f30f8095f..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Mul/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Mul (owl.Owl_neural.D.Graph.Neuron.Mul)

Module Neuron.Mul

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Mul.neuron_typ = - {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Normalisation/index.html b/owl/Owl_neural/D/Graph/Neuron/Normalisation/index.html deleted file mode 100644 index 98906ae1e..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Normalisation/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -Normalisation (owl.Owl_neural.D.Graph.Neuron.Normalisation)

Module Neuron.Normalisation

type neuron_typ = - Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Normalisation.neuron_typ = - {
  1. mutable axis : int;
  2. mutable beta : Optimise.Algodiff.t;
  3. mutable gamma : Optimise.Algodiff.t;
  4. mutable mu : Optimise.Algodiff.t;
  5. mutable var : Optimise.Algodiff.t;
  6. mutable decay : Optimise.Algodiff.t;
  7. mutable training : bool;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : - ?training:bool -> - ?decay:float -> - ?mu:Optimise.Algodiff.A.arr -> - ?var:Optimise.Algodiff.A.arr -> - int -> - neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val load_weights : neuron_typ -> Optimise.Algodiff.t array -> unit
val save_weights : neuron_typ -> Optimise.Algodiff.t array
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/A/Linalg/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/A/Linalg/index.html deleted file mode 100644 index 9433eed27..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_neural.D.Graph.Neuron.Optimise.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : - ?solver:[ `bilinear | `default | `direct ] -> - arr -> - arr -> - arr
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/A/Mat/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/A/Mat/index.html deleted file mode 100644 index 5f5c8efd7..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_neural.D.Graph.Neuron.Optimise.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/A/Scalar/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/A/Scalar/index.html deleted file mode 100644 index 32c6bf098..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl.Owl_neural.D.Graph.Neuron.Optimise.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/A/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/A/index.html deleted file mode 100644 index a40f5634f..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/A/index.html +++ /dev/null @@ -1,160 +0,0 @@ - -A (owl.Owl_neural.D.Graph.Neuron.Optimise.Algodiff.A)

Module Algodiff.A

val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(elt -> string) -> - arr -> - unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
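
Example (a minimal sketch, not part of the original signature): the operations above form the ndarray interface that Algodiff is parameterised over; Owl's dense ndarray modules such as Owl.Dense.Ndarray.D are assumed here to satisfy it, and the shapes are illustrative only.

  open Owl

  let () =
    let x = Dense.Ndarray.D.gaussian [| 3; 4 |] in   (* 3x4 random ndarray *)
    let s = Dense.Ndarray.D.sum ~axis:0 x in         (* column-wise sums, shape [|1;4|] *)
    let m = Dense.Ndarray.D.max' x in                (* largest element as a float *)
    Printf.printf "sum elements: %i, max: %g\n" (Dense.Ndarray.D.numel s) m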
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Arr/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Arr/index.html deleted file mode 100644 index e61735791..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl.Owl_neural.D.Graph.Neuron.Optimise.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
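
Example (sketch only): Arr wraps plain ndarrays as Algodiff values of type t, so results stay differentiable. AD below is an assumed alias for an Algodiff instance such as Owl.Algodiff.D.

  module AD = Owl.Algodiff.D

  let w = AD.Arr.uniform [| 4; 3 |]          (* an Algodiff value holding a 4x3 array *)
  let x = AD.Arr.ones [| 3; 1 |]
  let y = AD.Arr.dot w x                     (* still an AD.t, shape [|4;1|] *)
  let () = Printf.printf "rows of y: %i\n" (AD.row_num y)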
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/index.html deleted file mode 100644 index 832b3d906..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl.Owl_neural.D.Graph.Neuron.Optimise.Algodiff.Builder)

Module Algodiff.Builder

module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t
module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t
module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t
module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array
module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t
module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t
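
Example (hedged sketch): build_siso assembles a new differentiable unary operation from a first-class module implementing Siso. The cube function below, its derivative rule 3*x^2, and the alias AD for an Algodiff instance are illustrative assumptions, not part of the documented API.

  module AD = Owl.Algodiff.D

  let cube =
    AD.Builder.build_siso
      (module struct
        let label = "cube"
        let ff_f a = AD.F (AD.A.Scalar.(mul a (mul a a)))           (* forward pass on a scalar *)
        let ff_arr a = AD.Arr (AD.A.(mul a (mul a a)))              (* forward pass on an ndarray *)
        let df _cp ap at = AD.Maths.(AD.pack_flt 3. * sqr ap * at)  (* tangent rule: 3*x^2 * dx *)
        let dr a _cp ca =                                           (* adjoint rule for reverse mode *)
          AD.Maths.(!ca * AD.pack_flt 3. * sqr (AD.primal a))
      end : AD.Builder.Siso)

  let () =
    let d = AD.diff cube (AD.pack_flt 2.) in
    Printf.printf "d/dx x^3 at x=2: %g\n" (AD.unpack_flt d)   (* expect 12 *)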
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html deleted file mode 100644 index 8770e4f42..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl.Owl_neural.D.Graph.Neuron.Optimise.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html deleted file mode 100644 index 200cb5e9a..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl.Owl_neural.D.Graph.Neuron.Optimise.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html deleted file mode 100644 index fa3266bf0..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl.Owl_neural.D.Graph.Neuron.Optimise.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html deleted file mode 100644 index dd9d07816..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl.Owl_neural.D.Graph.Neuron.Optimise.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html deleted file mode 100644 index 494254633..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl.Owl_neural.D.Graph.Neuron.Optimise.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html deleted file mode 100644 index 308280e7c..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl.Owl_neural.D.Graph.Neuron.Optimise.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Linalg/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Linalg/index.html deleted file mode 100644 index f0f60381e..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_neural.D.Graph.Neuron.Optimise.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t
val logdet : t -> t
val chol : ?upper:bool -> t -> t
val qr : t -> t * t
val lq : t -> t * t
val svd : ?thin:bool -> t -> t * t * t
val sylvester : t -> t -> t -> t
val lyapunov : t -> t -> t
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> t -> t -> t
val (/@) : t -> t -> t
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> t -> t -> t
val care : ?diag_r:bool -> t -> t -> t -> t -> t
val dare : ?diag_r:bool -> t -> t -> t -> t -> t
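
Example (sketch): because these routines operate on Algodiff values, gradients propagate through linear-algebra operations. AD is an assumed alias for an Algodiff instance such as Owl.Algodiff.D; the matrix size is arbitrary.

  module AD = Owl.Algodiff.D

  let a = AD.Mat.gaussian 4 4
  let f m = AD.Maths.sum' (AD.Linalg.inv m)   (* scalar function of a matrix *)
  let g = AD.grad f a                         (* gradient w.r.t. every entry of a *)
  let () = Printf.printf "gradient shape: %ix%i\n" (AD.row_num g) (AD.col_num g)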
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Mat/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Mat/index.html deleted file mode 100644 index 036e428a8..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_neural.D.Graph.Neuron.Optimise.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Maths/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Maths/index.html deleted file mode 100644 index 483d762f3..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl.Owl_neural.D.Graph.Neuron.Optimise.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t
val (-) : t -> t -> t
val (*) : t -> t -> t
val (/) : t -> t -> t
val (*@) : t -> t -> t
val (**) : t -> t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val kron : t -> t -> t
val dot : t -> t -> t
val pow : t -> t -> t
val atan2 : t -> t -> t
val min2 : t -> t -> t
val max2 : t -> t -> t
val cross_entropy : t -> t -> t
val inv : t -> t
val neg : t -> t
val abs : t -> t
val signum : t -> t
val floor : t -> t
val ceil : t -> t
val round : t -> t
val sqr : t -> t
val sqrt : t -> t
val log : t -> t
val log2 : t -> t
val log10 : t -> t
val exp : t -> t
val sin : t -> t
val cos : t -> t
val tan : t -> t
val sinh : t -> t
val cosh : t -> t
val tanh : t -> t
val asin : t -> t
val acos : t -> t
val atan : t -> t
val asinh : t -> t
val acosh : t -> t
val atanh : t -> t
val sum' : t -> t
val log_sum_exp' : t -> t
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t
val sum : ?axis:int -> ?keep_dims:bool -> t -> t
val sum_reduce : ?axis:int array -> t -> t
val mean : t -> t
val transpose : ?axis:int array -> t -> t
val swap : int -> int -> t -> t
val l1norm' : t -> t
val l2norm' : t -> t
val l2norm_sqr' : t -> t
val sigmoid : t -> t
val relu : t -> t
val dawsn : t -> t
val softplus : t -> t
val softsign : t -> t
val softmax : ?axis:int -> t -> t
val reshape : t -> int array -> t
val flatten : t -> t
val get_item : t -> int -> int -> t
val get_row : t -> int -> t
val concat : axis:int -> t -> t -> t
val split : axis:int -> int array -> t -> t array
val of_arrays : t array array -> t
val to_arrays : t -> t array array
val concatenate : axis:int -> t array -> t
val stack : axis:int -> t array -> t
val get_slice : int list list -> t -> t
val set_slice : int list list -> t -> t -> t
val get_fancy : Owl_types.index list -> t -> t
val set_fancy : Owl_types.index list -> t -> t -> t
val diag : ?k:int -> t -> t
val diagm : ?k:int -> t -> t
val trace : t -> t
val triu : ?k:int -> t -> t
val tril : ?k:int -> t -> t
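
Example (sketch): Maths provides both named functions and infix operators over t, so differentiable expressions read like ordinary float code. AD is an assumed alias for an Algodiff instance such as Owl.Algodiff.D.

  module AD = Owl.Algodiff.D

  let x = AD.pack_flt 0.5
  let y = AD.Maths.(sin x + cos x * AD.pack_flt 2.)   (* an AD.t scalar *)
  let () = Printf.printf "%g\n" (AD.unpack_flt y)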
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/NN/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/NN/index.html deleted file mode 100644 index 52fe71191..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl.Owl_neural.D.Graph.Neuron.Optimise.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t
val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val upsampling2d : t -> int array -> t
val pad : ?v:A.elt -> int list list -> t -> t
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/index.html deleted file mode 100644 index 4c5d95841..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Algodiff/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Algodiff (owl.Owl_neural.D.Graph.Neuron.Optimise.Algodiff)

Module Optimise.Algodiff

module A : sig ... end
type t = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Optimise.Algodiff.t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
val tag : unit -> int
val primal : t -> t
val primal' : t -> t
val zero : t -> t
val reset_zero : t -> t
val tangent : t -> t
val adjref : t -> t Stdlib.ref
val adjval : t -> t
val shape : t -> int array
val is_float : t -> bool
val is_arr : t -> bool
val row_num : t -> int
val col_num : t -> int
val numel : t -> int
val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t
val clip_by_l2norm : A.elt -> t -> t
val copy_primal' : t -> t
val tile : t -> int array -> t
val repeat : t -> int array -> t
val pack_elt : A.elt -> t
val unpack_elt : t -> A.elt
val pack_flt : float -> t
val _f : float -> t
val unpack_flt : t -> float
val pack_arr : A.arr -> t
val unpack_arr : t -> A.arr
val deep_info : t -> string
val type_info : t -> string
val error_binop : string -> t -> t -> 'a
val error_uniop : string -> t -> 'a
val make_forward : t -> t -> int -> t
val make_reverse : t -> int -> t
val reverse_prop : t -> t -> unit
val diff : (t -> t) -> t -> t
val diff' : (t -> t) -> t -> t * t
val grad : (t -> t) -> t -> t
val grad' : (t -> t) -> t -> t * t
val jacobian : (t -> t) -> t -> t
val jacobian' : (t -> t) -> t -> t * t
val jacobianv : (t -> t) -> t -> t -> t
val jacobianv' : (t -> t) -> t -> t -> t * t
val jacobianTv : (t -> t) -> t -> t -> t
val jacobianTv' : (t -> t) -> t -> t -> t * t
val hessian : (t -> t) -> t -> t
val hessian' : (t -> t) -> t -> t * t
val hessianv : (t -> t) -> t -> t -> t
val hessianv' : (t -> t) -> t -> t -> t * t
val laplacian : (t -> t) -> t -> t
val laplacian' : (t -> t) -> t -> t * t
val gradhessian : (t -> t) -> t -> t * t
val gradhessian' : (t -> t) -> t -> t * t * t
val gradhessianv : (t -> t) -> t -> t -> t * t
val gradhessianv' : (t -> t) -> t -> t -> t * t * t
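
Example (sketch): the higher-order operators above differentiate ordinary OCaml functions written with Maths, and they compose, e.g. for second derivatives. AD is an assumed alias for an Algodiff instance such as Owl.Algodiff.D.

  module AD = Owl.Algodiff.D

  let f x = AD.Maths.(tanh x * sin x)
  let y, dy = AD.diff' f (AD.pack_flt 1.)    (* value and first derivative at 1.0 *)
  let d2f = AD.diff (AD.diff f)              (* operators compose: second derivative *)
  let () =
    Printf.printf "f(1)=%g f'(1)=%g f''(1)=%g\n"
      (AD.unpack_flt y) (AD.unpack_flt dy)
      (AD.unpack_flt (d2f (AD.pack_flt 1.)))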
module Builder : sig ... end
module Maths : sig ... end
module Linalg : sig ... end
module NN : sig ... end
module Mat : sig ... end
module Arr : sig ... end
val to_trace : t list -> string
val to_dot : t list -> string
val pp_num : Stdlib.Format.formatter -> t -> unit
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Batch/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Batch/index.html deleted file mode 100644 index 2c21cd166..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Batch/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Batch (owl.Owl_neural.D.Graph.Neuron.Optimise.Batch)

Module Optimise.Batch

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Optimise.Batch.typ =
  1. | Full
  2. | Mini of int
  3. | Sample of int
  4. | Stochastic
val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val batches : typ -> Algodiff.t -> int
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Checkpoint/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Checkpoint/index.html deleted file mode 100644 index 1ebb5bff3..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Checkpoint/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Checkpoint (owl.Owl_neural.D.Graph.Neuron.Optimise.Checkpoint)

Module Optimise.Checkpoint

type state = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Optimise.Checkpoint.state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}
type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Optimise.Checkpoint.typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of state -> unit
  4. | None
val init_state : int -> float -> state
val default_checkpoint_fun : (string -> 'a) -> 'a
val print_state_info : state -> unit
val print_summary : state -> unit
val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Clipping/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Clipping/index.html deleted file mode 100644 index 70054638d..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Clipping/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Clipping (owl.Owl_neural.D.Graph.Neuron.Optimise.Clipping)

Module Optimise.Clipping

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Optimise.Clipping.typ =
  1. | L2norm of float
  2. | Value of float * float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Gradient/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Gradient/index.html deleted file mode 100644 index 43cd85876..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Gradient/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Gradient (owl.Owl_neural.D.Graph.Neuron.Optimise.Gradient)

Module Optimise.Gradient

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Optimise.Gradient.typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton
val run : typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Learning_Rate/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Learning_Rate/index.html deleted file mode 100644 index ed1b49442..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Learning_Rate/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Learning_Rate (owl.Owl_neural.D.Graph.Neuron.Optimise.Learning_Rate)

Module Optimise.Learning_Rate

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Optimise.Learning_Rate.typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t
val default : typ -> typ
val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Loss/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Loss/index.html deleted file mode 100644 index e33230f11..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Loss/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Loss (owl.Owl_neural.D.Graph.Neuron.Optimise.Loss)

Module Optimise.Loss

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Optimise.Loss.typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of Algodiff.t -> Algodiff.t -> Algodiff.t
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
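
Example (sketch): Loss.run applies the selected loss to two Algodiff values, typically a prediction and a target, yielding a scalar suitable for backpropagation. The module path O and the random inputs below are illustrative assumptions.

  module O = Owl.Neural.D.Optimise

  let y  = O.Algodiff.Arr.uniform [| 10; 1 |]   (* hypothetical prediction *)
  let y' = O.Algodiff.Arr.uniform [| 10; 1 |]   (* hypothetical target *)
  let l  = O.Loss.run O.Loss.Quadratic y y'
  let () = Printf.printf "loss = %g\n" (O.Algodiff.unpack_flt l)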
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Momentum/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Momentum/index.html deleted file mode 100644 index 1f2ec651a..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Momentum/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Momentum (owl.Owl_neural.D.Graph.Neuron.Optimise.Momentum)

Module Optimise.Momentum

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Optimise.Momentum.typ =
  1. | Standard of float
  2. | Nesterov of float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Params/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Params/index.html deleted file mode 100644 index 5b9885de6..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Params/index.html +++ /dev/null @@ -1,16 +0,0 @@ - -Params (owl.Owl_neural.D.Graph.Neuron.Optimise.Params)

Module Optimise.Params

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Optimise.Params.typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}
val default : unit -> typ
val config : ?batch:Batch.typ -> ?gradient:Gradient.typ -> ?loss:Loss.typ -> ?learning_rate:Learning_Rate.typ -> ?regularisation:Regularisation.typ -> ?momentum:Momentum.typ -> ?clipping:Clipping.typ -> ?stopping:Stopping.typ -> ?checkpoint:Checkpoint.typ -> ?verbosity:bool -> float -> typ
val to_string : typ -> string
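
Example (sketch): Params.config builds a training configuration; fields left unspecified fall back to the defaults from default (). The module path O and the hyper-parameter values below are illustrative only.

  module O = Owl.Neural.D.Optimise

  let params =
    O.Params.config
      ~batch:(O.Batch.Mini 100)
      ~learning_rate:(O.Learning_Rate.Adagrad 0.005)
      ~loss:O.Loss.Cross_entropy
      ~stopping:(O.Stopping.Const 1e-6)
      10.                                    (* the positional float is the number of epochs *)

  let () = print_endline (O.Params.to_string params)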
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Regularisation/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Regularisation/index.html deleted file mode 100644 index 022d96dac..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Regularisation/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Regularisation (owl.Owl_neural.D.Graph.Neuron.Optimise.Regularisation)

Module Optimise.Regularisation

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Optimise.Regularisation.typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None
val run : typ -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Stopping/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Stopping/index.html deleted file mode 100644 index 1ac5a60e2..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Stopping/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Stopping (owl.Owl_neural.D.Graph.Neuron.Optimise.Stopping)

Module Optimise.Stopping

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Optimise.Stopping.typ =
  1. | Const of float
  2. | Early of int * int
  3. | None
val run : typ -> float -> bool
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/Utils/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/Utils/index.html deleted file mode 100644 index b0a3d6018..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl.Owl_neural.D.Graph.Neuron.Optimise.Utils)

Module Optimise.Utils

val sample_num : Algodiff.t -> int
val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val get_chunk : Algodiff.t -> Algodiff.t -> int -> int -> Algodiff.t * Algodiff.t
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Optimise/index.html b/owl/Owl_neural/D/Graph/Neuron/Optimise/index.html deleted file mode 100644 index 8d4bf8c7f..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Optimise/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -Optimise (owl.Owl_neural.D.Graph.Neuron.Optimise)

Module Neuron.Optimise

module Algodiff : sig ... end
module Utils : sig ... end
module Learning_Rate : sig ... end
module Batch : sig ... end
module Loss : sig ... end
module Gradient : sig ... end
module Momentum : sig ... end
module Regularisation : sig ... end
module Clipping : sig ... end
module Stopping : sig ... end
module Checkpoint : sig ... end
module Params : sig ... end
val minimise_weight : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> (Algodiff.t array array -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
val minimise_fun : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_compiled_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> (unit -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
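
Example (sketch): minimise_fun optimises an arbitrary scalar Algodiff function, not only a network; minimise_weight and minimise_network follow the same pattern with extra callbacks. The module paths and constants below are illustrative.

  module O  = Owl.Neural.D.Optimise
  module AD = O.Algodiff

  let f x = AD.Maths.(sqr (x - AD.pack_flt 3.))          (* minimum at x = 3 *)
  let params = O.Params.config ~learning_rate:(O.Learning_Rate.Const 0.1) 100.
  let _state, x_star = O.minimise_fun params f (AD.pack_flt 0.)
  let () = Printf.printf "argmin ~ %g\n" (AD.unpack_flt x_star)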
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Padding1D/index.html b/owl/Owl_neural/D/Graph/Neuron/Padding1D/index.html deleted file mode 100644 index 19d02abea..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Padding1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding1D (owl.Owl_neural.D.Graph.Neuron.Padding1D)

Module Neuron.Padding1D

\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Padding2D/index.html b/owl/Owl_neural/D/Graph/Neuron/Padding2D/index.html deleted file mode 100644 index 125abea0b..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Padding2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Padding2D (owl.Owl_neural.D.Graph.Neuron.Padding2D)

Module Neuron.Padding2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Padding2D.neuron_typ = {
  1. mutable padding : int array array;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int array array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Padding3D/index.html b/owl/Owl_neural/D/Graph/Neuron/Padding3D/index.html deleted file mode 100644 index 509d3c72e..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Padding3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding3D (owl.Owl_neural.D.Graph.Neuron.Padding3D)

Module Neuron.Padding3D

\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Recurrent/index.html b/owl/Owl_neural/D/Graph/Neuron/Recurrent/index.html deleted file mode 100644 index fa9ef05ad..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Recurrent/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Recurrent (owl.Owl_neural.D.Graph.Neuron.Recurrent)

Module Neuron.Recurrent

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Recurrent.neuron_typ = {
  1. mutable whh : Optimise.Algodiff.t;
  2. mutable wxh : Optimise.Algodiff.t;
  3. mutable why : Optimise.Algodiff.t;
  4. mutable bh : Optimise.Algodiff.t;
  5. mutable by : Optimise.Algodiff.t;
  6. mutable h : Optimise.Algodiff.t;
  7. mutable hiddens : int;
  8. mutable act : Activation.typ;
  9. mutable init_typ : Init.typ;
  10. mutable in_shape : int array;
  11. mutable out_shape : int array;
}
val create : ?time_steps:int -> ?inputs:int -> int -> int -> Activation.typ -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Reshape/index.html b/owl/Owl_neural/D/Graph/Neuron/Reshape/index.html deleted file mode 100644 index 6935ecbea..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Reshape/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Reshape (owl.Owl_neural.D.Graph.Neuron.Reshape)

Module Neuron.Reshape

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Reshape.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : ?inputs:int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/Slice/index.html b/owl/Owl_neural/D/Graph/Neuron/Slice/index.html deleted file mode 100644 index b0a2c05ab..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/Slice/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Slice (owl.Owl_neural.D.Graph.Neuron.Slice)

Module Neuron.Slice

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.Slice.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
  3. mutable slice : int list list;
}
val create : int list list -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/TransposeConv1D/index.html b/owl/Owl_neural/D/Graph/Neuron/TransposeConv1D/index.html deleted file mode 100644 index 2bb2fef04..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/TransposeConv1D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -TransposeConv1D (owl.Owl_neural.D.Graph.Neuron.TransposeConv1D)

Module Neuron.TransposeConv1D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.TransposeConv1D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/TransposeConv2D/index.html b/owl/Owl_neural/D/Graph/Neuron/TransposeConv2D/index.html deleted file mode 100644 index a529645b9..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/TransposeConv2D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -TransposeConv2D (owl.Owl_neural.D.Graph.Neuron.TransposeConv2D)

Module Neuron.TransposeConv2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.TransposeConv2D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/TransposeConv3D/index.html b/owl/Owl_neural/D/Graph/Neuron/TransposeConv3D/index.html deleted file mode 100644 index 92a805373..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/TransposeConv3D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -TransposeConv3D (owl.Owl_neural.D.Graph.Neuron.TransposeConv3D)

Module Neuron.TransposeConv3D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.TransposeConv3D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/UpSampling1D/index.html b/owl/Owl_neural/D/Graph/Neuron/UpSampling1D/index.html deleted file mode 100644 index 49227fba7..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/UpSampling1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling1D (owl.Owl_neural.D.Graph.Neuron.UpSampling1D)

Module Neuron.UpSampling1D

\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/UpSampling2D/index.html b/owl/Owl_neural/D/Graph/Neuron/UpSampling2D/index.html deleted file mode 100644 index facc1be80..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/UpSampling2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -UpSampling2D (owl.Owl_neural.D.Graph.Neuron.UpSampling2D)

Module Neuron.UpSampling2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.UpSampling2D.neuron_typ = {
  1. mutable size : int array;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/UpSampling3D/index.html b/owl/Owl_neural/D/Graph/Neuron/UpSampling3D/index.html deleted file mode 100644 index 350e9d2b3..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/UpSampling3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling3D (owl.Owl_neural.D.Graph.Neuron.UpSampling3D)

Module Neuron.UpSampling3D

\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/Neuron/index.html b/owl/Owl_neural/D/Graph/Neuron/index.html deleted file mode 100644 index 93cc29759..000000000 --- a/owl/Owl_neural/D/Graph/Neuron/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Neuron (owl.Owl_neural.D.Graph.Neuron)

Module Graph.Neuron

module Optimise : sig ... end
module Init : sig ... end
module Input : sig ... end
module Activation : sig ... end
module Linear : sig ... end
module LinearNoBias : sig ... end
module Recurrent : sig ... end
module LSTM : sig ... end
module GRU : sig ... end
module Conv1D : sig ... end
module Conv2D : sig ... end
module Conv3D : sig ... end
module DilatedConv1D : sig ... end
module DilatedConv2D : sig ... end
module DilatedConv3D : sig ... end
module TransposeConv1D : sig ... end
module TransposeConv2D : sig ... end
module TransposeConv3D : sig ... end
module FullyConnected : sig ... end
module MaxPool1D : sig ... end
module MaxPool2D : sig ... end
module AvgPool1D : sig ... end
module AvgPool2D : sig ... end
module GlobalMaxPool1D : sig ... end
module GlobalMaxPool2D : sig ... end
module GlobalAvgPool1D : sig ... end
module GlobalAvgPool2D : sig ... end
module UpSampling1D : sig ... end
module UpSampling2D : sig ... end
module UpSampling3D : sig ... end
module Padding1D : sig ... end
module Padding2D : sig ... end
module Padding3D : sig ... end
module Lambda : sig ... end
module LambdaArray : sig ... end
module Dropout : sig ... end
module Reshape : sig ... end
module Flatten : sig ... end
module Slice : sig ... end
module Add : sig ... end
module Mul : sig ... end
module Dot : sig ... end
module Max : sig ... end
module Average : sig ... end
module Concatenate : sig ... end
module Normalisation : sig ... end
module GaussianNoise : sig ... end
module GaussianDropout : sig ... end
module AlphaDropout : sig ... end
module Embedding : sig ... end
module Masking : sig ... end
type neuron = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).Neuron.neuron =
  1. | Input of Input.neuron_typ
  2. | Linear of Linear.neuron_typ
  3. | LinearNoBias of LinearNoBias.neuron_typ
  4. | Embedding of Embedding.neuron_typ
  5. | LSTM of LSTM.neuron_typ
  6. | GRU of GRU.neuron_typ
  7. | Recurrent of Recurrent.neuron_typ
  8. | Conv1D of Conv1D.neuron_typ
  9. | Conv2D of Conv2D.neuron_typ
  10. | Conv3D of Conv3D.neuron_typ
  11. | DilatedConv1D of DilatedConv1D.neuron_typ
  12. | DilatedConv2D of DilatedConv2D.neuron_typ
  13. | DilatedConv3D of DilatedConv3D.neuron_typ
  14. | TransposeConv1D of TransposeConv1D.neuron_typ
  15. | TransposeConv2D of TransposeConv2D.neuron_typ
  16. | TransposeConv3D of TransposeConv3D.neuron_typ
  17. | FullyConnected of FullyConnected.neuron_typ
  18. | MaxPool1D of MaxPool1D.neuron_typ
  19. | MaxPool2D of MaxPool2D.neuron_typ
  20. | AvgPool1D of AvgPool1D.neuron_typ
  21. | AvgPool2D of AvgPool2D.neuron_typ
  22. | GlobalMaxPool1D of GlobalMaxPool1D.neuron_typ
  23. | GlobalMaxPool2D of GlobalMaxPool2D.neuron_typ
  24. | GlobalAvgPool1D of GlobalAvgPool1D.neuron_typ
  25. | GlobalAvgPool2D of GlobalAvgPool2D.neuron_typ
  26. | UpSampling2D of UpSampling2D.neuron_typ
  27. | Padding2D of Padding2D.neuron_typ
  28. | Dropout of Dropout.neuron_typ
  29. | Reshape of Reshape.neuron_typ
  30. | Flatten of Flatten.neuron_typ
  31. | Slice of Slice.neuron_typ
  32. | Lambda of Lambda.neuron_typ
  33. | LambdaArray of LambdaArray.neuron_typ
  34. | Activation of Activation.neuron_typ
  35. | GaussianNoise of GaussianNoise.neuron_typ
  36. | GaussianDropout of GaussianDropout.neuron_typ
  37. | AlphaDropout of AlphaDropout.neuron_typ
  38. | Normalisation of Normalisation.neuron_typ
  39. | Add of Add.neuron_typ
  40. | Mul of Mul.neuron_typ
  41. | Dot of Dot.neuron_typ
  42. | Max of Max.neuron_typ
  43. | Average of Average.neuron_typ
  44. | Concatenate of Concatenate.neuron_typ
val get_in_out_shape : neuron -> int array * int array
val get_in_shape : neuron -> int array
val get_out_shape : neuron -> int array
val connect : int array array -> neuron -> unit
val init : neuron -> unit
val reset : neuron -> unit
val mktag : int -> neuron -> unit
val mkpar : neuron -> Optimise.Algodiff.t array
val mkpri : neuron -> Optimise.Algodiff.t array
val mkadj : neuron -> Optimise.Algodiff.t array
val update : neuron -> Optimise.Algodiff.t array -> unit
val load_weights : neuron -> Optimise.Algodiff.t array -> unit
val save_weights : neuron -> Optimise.Algodiff.t array
val copy : neuron -> neuron
val to_string : neuron -> string
val to_name : neuron -> string
\ No newline at end of file diff --git a/owl/Owl_neural/D/Graph/index.html b/owl/Owl_neural/D/Graph/index.html deleted file mode 100644 index 1577abbb2..000000000 --- a/owl/Owl_neural/D/Graph/index.html +++ /dev/null @@ -1,245 +0,0 @@ - -Graph (owl.Owl_neural.D.Graph)

Module D.Graph

module Neuron : sig ... end
type node = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).node = {
  1. mutable name : string;
  2. mutable prev : node array;
  3. mutable next : node array;
  4. mutable neuron : Neuron.neuron;
  5. mutable output : Neuron.Optimise.Algodiff.t option;
  6. mutable network : network;
  7. mutable train : bool;
}
and network = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.D).network = {
  1. mutable nnid : string;
  2. mutable size : int;
  3. mutable roots : node array;
  4. mutable outputs : node array;
  5. mutable topo : node array;
}
val make_network : ?nnid:string -> int -> node array -> node array -> network
val make_node : ?name:string -> ?train:bool -> node array -> node array -> Neuron.neuron -> Neuron.Optimise.Algodiff.t option -> network -> node
val get_roots : network -> node array
val get_outputs : network -> node array
val get_node : network -> string -> node
val get_network : ?name:string -> node -> network
val outputs : ?name:string -> node array -> network
val get_network_name : network -> string
val set_network_name : network -> string -> unit
val collect_output : node array -> Neuron.Optimise.Algodiff.t array
val connect_pair : node -> node -> unit
val connect_to_parents : node array -> node -> unit
val add_node : ?act_typ:Neuron.Activation.typ -> network -> node array -> node -> node
val input_shape : network -> int array
val input_shapes : network -> int array array
val init : network -> unit
val reset : network -> unit
val mktag : int -> network -> unit
val mkpar : network -> Neuron.Optimise.Algodiff.t array array
val mkpri : network -> Neuron.Optimise.Algodiff.t array array
val mkadj : network -> Neuron.Optimise.Algodiff.t array array
val update : network -> Neuron.Optimise.Algodiff.t array array -> unit
val run_inputs : Neuron.Optimise.Algodiff.t array -> network -> Neuron.Optimise.Algodiff.t array
val forward_inputs : network -> Neuron.Optimise.Algodiff.t array -> Neuron.Optimise.Algodiff.t array * Neuron.Optimise.Algodiff.t array array
val backward : network -> Neuron.Optimise.Algodiff.t -> Neuron.Optimise.Algodiff.t array array * Neuron.Optimise.Algodiff.t array array
val copy : network -> network
val model_inputs : network -> Neuron.Optimise.Algodiff.A.arr array -> Neuron.Optimise.Algodiff.A.arr array
val input : ?name:string -> int array -> node
val inputs : ?names:string array -> int array array -> node array
val activation : ?name:string -> Neuron.Activation.typ -> node -> node
val linear : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> node -> node
val linear_nobias : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> node -> node
val embedding : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> int -> node -> node
val recurrent : ?name:string -> ?init_typ:Neuron.Init.typ -> act_typ:Neuron.Activation.typ -> int -> int -> node -> node
val lstm : ?name:string -> ?init_typ:Neuron.Init.typ -> int -> node -> node
val gru : ?name:string -> ?init_typ:Neuron.Init.typ -> int -> node -> node
val conv1d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val conv2d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val conv3d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val dilated_conv1d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> int array -> node -> node
val dilated_conv2d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> int array -> node -> node
val dilated_conv3d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> int array -> node -> node
val transpose_conv1d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val transpose_conv2d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val transpose_conv3d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val fully_connected : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> node -> node
val max_pool1d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val max_pool2d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val avg_pool1d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val avg_pool2d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val global_max_pool1d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val global_max_pool2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val global_avg_pool1d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val global_avg_pool2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val upsampling2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> int array -> node -> node
val padding2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> int array array -> node -> node
val dropout : ?name:string -> float -> node -> node
val gaussian_noise : ?name:string -> float -> node -> node
val gaussian_dropout : ?name:string -> float -> node -> node
val alpha_dropout : ?name:string -> float -> node -> node
val normalisation : ?name:string -> ?axis:int -> ?training:bool -> ?decay:float -> ?mu:Neuron.Optimise.Algodiff.A.arr -> ?var:Neuron.Optimise.Algodiff.A.arr -> node -> node
val reshape : ?name:string -> int array -> node -> node
val flatten : ?name:string -> node -> node
val slice : ?name:string -> int list list -> node -> node
val lambda : ?name:string -> ?act_typ:Neuron.Activation.typ -> ?out_shape:int array -> (Neuron.Optimise.Algodiff.t -> Neuron.Optimise.Algodiff.t) -> node -> node
val lambda_array : ?name:string -> ?act_typ:Neuron.Activation.typ -> int array -> (Neuron.Optimise.Algodiff.t array -> Neuron.Optimise.Algodiff.t) -> node array -> node
val add : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val mul : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val dot : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val max : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val average : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val concatenate : ?name:string -> ?act_typ:Neuron.Activation.typ -> int -> node array -> node
val to_string : network -> string
val pp_network : Stdlib.Format.formatter -> network -> unit
val print : network -> unit
val save : ?unsafe:bool -> network -> string -> unit
val load : string -> network
val save_weights : network -> string -> unit
val load_weights : network -> string -> unit
val make_subnetwork : ?copy:bool -> ?make_inputs:string array -> network -> string array -> network
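
Example (sketch): the constructors above compose with |> to build a network node by node, and get_network closes the graph. Layer sizes follow the usual MNIST-style example and are illustrative only; the packed paths Owl.Neural.D and Graph are assumed.

  open Owl
  open Neural.D
  open Graph

  let network =
    input [| 28; 28; 1 |]
    |> conv2d [| 5; 5; 1; 32 |] [| 1; 1 |] ~act_typ:Activation.Relu
    |> max_pool2d [| 2; 2 |] [| 2; 2 |]
    |> fully_connected 512 ~act_typ:Activation.Relu
    |> linear 10 ~act_typ:(Activation.Softmax 1)
    |> get_network

  let () = print network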
\ No newline at end of file diff --git a/owl/Owl_neural/D/index.html b/owl/Owl_neural/D/index.html deleted file mode 100644 index bd5861e08..000000000 --- a/owl/Owl_neural/D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -D (owl.Owl_neural.D)

Module Owl_neural.D

include sig ... end
module Graph : sig ... end
module Optimise = Graph.Neuron.Optimise
module Init = Graph.Neuron.Init
module Activation = Graph.Neuron.Activation
module Regularisation = Graph.Neuron.Optimise.Regularisation
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Activation/index.html b/owl/Owl_neural/S/Graph/Neuron/Activation/index.html deleted file mode 100644 index 77d4791f1..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Activation/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Activation (owl.Owl_neural.S.Graph.Neuron.Activation)

Module Neuron.Activation

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Activation.typ =
  1. | Elu
  2. | Relu
  3. | Sigmoid
  4. | HardSigmoid
  5. | Softmax of int
  6. | Softplus
  7. | Softsign
  8. | Tanh
  9. | Relu6
  10. | LeakyRelu of float
  11. | TRelu of float
  12. | Custom of Optimise.Algodiff.t -> Optimise.Algodiff.t
  13. | None
type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Activation.neuron_typ = {
  1. mutable activation : typ;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val run_activation : Optimise.Algodiff.t -> typ -> Optimise.Algodiff.t
val copy : neuron_typ -> neuron_typ
val activation_to_string : typ -> string
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Add/index.html b/owl/Owl_neural/S/Graph/Neuron/Add/index.html deleted file mode 100644 index 2353ea275..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Add/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Add (owl.Owl_neural.S.Graph.Neuron.Add)

Module Neuron.Add

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Add.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/AlphaDropout/index.html b/owl/Owl_neural/S/Graph/Neuron/AlphaDropout/index.html deleted file mode 100644 index a2fa97dbf..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/AlphaDropout/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -AlphaDropout (owl.Owl_neural.S.Graph.Neuron.AlphaDropout)

Module Neuron.AlphaDropout

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.AlphaDropout.neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Average/index.html b/owl/Owl_neural/S/Graph/Neuron/Average/index.html deleted file mode 100644 index 4b770e595..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Average/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Average (owl.Owl_neural.S.Graph.Neuron.Average)

Module Neuron.Average

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Average.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/AvgPool1D/index.html b/owl/Owl_neural/S/Graph/Neuron/AvgPool1D/index.html deleted file mode 100644 index 13c8d5a02..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/AvgPool1D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -AvgPool1D (owl.Owl_neural.S.Graph.Neuron.AvgPool1D)

Module Neuron.AvgPool1D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.AvgPool1D.neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/AvgPool2D/index.html b/owl/Owl_neural/S/Graph/Neuron/AvgPool2D/index.html deleted file mode 100644 index 5e6f20835..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/AvgPool2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -AvgPool2D (owl.Owl_neural.S.Graph.Neuron.AvgPool2D)

Module Neuron.AvgPool2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.AvgPool2D.neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Concatenate/index.html b/owl/Owl_neural/S/Graph/Neuron/Concatenate/index.html deleted file mode 100644 index 9d72ae0a1..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Concatenate/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Concatenate (owl.Owl_neural.S.Graph.Neuron.Concatenate)

Module Neuron.Concatenate

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Concatenate.neuron_typ = {
  1. mutable axis : int;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Conv1D/index.html b/owl/Owl_neural/S/Graph/Neuron/Conv1D/index.html deleted file mode 100644 index 5304d78bf..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Conv1D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -Conv1D (owl.Owl_neural.S.Graph.Neuron.Conv1D)

Module Neuron.Conv1D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Conv1D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Conv2D/index.html b/owl/Owl_neural/S/Graph/Neuron/Conv2D/index.html deleted file mode 100644 index 5f24d50d0..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Conv2D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -Conv2D (owl.Owl_neural.S.Graph.Neuron.Conv2D)

Module Neuron.Conv2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Conv2D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
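A hedged sketch of building this neuron directly (assuming owl is installed; the kernel layout [| height; width; in_channels; out_channels |] is an assumption, not stated on this page):

    module C2 = Owl_neural.S.Graph.Neuron.Conv2D
    module Init = Owl_neural.S.Graph.Neuron.Init
    let () =
      (* 3x3 kernel, 3 -> 32 channels, 1x1 stride, SAME padding *)
      let c = C2.create Owl_types.SAME [| 3; 3; 3; 32 |] [| 1; 1 |] Init.GlorotUniform in
      C2.connect [| 28; 28; 3 |] c;   (* 28x28x3 input feature map *)
      C2.init c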
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Conv3D/index.html b/owl/Owl_neural/S/Graph/Neuron/Conv3D/index.html deleted file mode 100644 index 71d63d178..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Conv3D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -Conv3D (owl.Owl_neural.S.Graph.Neuron.Conv3D)

Module Neuron.Conv3D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Conv3D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable padding : Owl_types.padding;
  6. mutable init_typ : Init.typ;
  7. mutable in_shape : int array;
  8. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/DilatedConv1D/index.html b/owl/Owl_neural/S/Graph/Neuron/DilatedConv1D/index.html deleted file mode 100644 index 861d7ce75..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/DilatedConv1D/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -DilatedConv1D (owl.Owl_neural.S.Graph.Neuron.DilatedConv1D)

Module Neuron.DilatedConv1D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.DilatedConv1D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/DilatedConv2D/index.html b/owl/Owl_neural/S/Graph/Neuron/DilatedConv2D/index.html deleted file mode 100644 index 2a69646d4..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/DilatedConv2D/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -DilatedConv2D (owl.Owl_neural.S.Graph.Neuron.DilatedConv2D)

Module Neuron.DilatedConv2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.DilatedConv2D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/DilatedConv3D/index.html b/owl/Owl_neural/S/Graph/Neuron/DilatedConv3D/index.html deleted file mode 100644 index 87f5fcf5e..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/DilatedConv3D/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -DilatedConv3D (owl.Owl_neural.S.Graph.Neuron.DilatedConv3D)

Module Neuron.DilatedConv3D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.DilatedConv3D.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable kernel : int array;
  4. mutable stride : int array;
  5. mutable rate : int array;
  6. mutable padding : Owl_types.padding;
  7. mutable init_typ : Init.typ;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Dot/index.html b/owl/Owl_neural/S/Graph/Neuron/Dot/index.html deleted file mode 100644 index 12360114c..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Dot/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Dot (owl.Owl_neural.S.Graph.Neuron.Dot)

Module Neuron.Dot

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Dot.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Dropout/index.html b/owl/Owl_neural/S/Graph/Neuron/Dropout/index.html deleted file mode 100644 index 6e2f69fea..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Dropout/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Dropout (owl.Owl_neural.S.Graph.Neuron.Dropout)

Module Neuron.Dropout

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Dropout.neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Embedding/index.html b/owl/Owl_neural/S/Graph/Neuron/Embedding/index.html deleted file mode 100644 index 38087b292..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Embedding/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Embedding (owl.Owl_neural.S.Graph.Neuron.Embedding)

Module Neuron.Embedding

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Embedding.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_dim : int;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Flatten/index.html b/owl/Owl_neural/S/Graph/Neuron/Flatten/index.html deleted file mode 100644 index 75e4fbe20..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Flatten/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Flatten (owl.Owl_neural.S.Graph.Neuron.Flatten)

Module Neuron.Flatten

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Flatten.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/FullyConnected/index.html b/owl/Owl_neural/S/Graph/Neuron/FullyConnected/index.html deleted file mode 100644 index b67ba33e3..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/FullyConnected/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -FullyConnected (owl.Owl_neural.S.Graph.Neuron.FullyConnected)

Module Neuron.FullyConnected

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.FullyConnected.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
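A minimal sketch (assuming owl is installed): a fully connected neuron mapping 16 inputs to 8 outputs with standard initialisation.

    module FC = Owl_neural.S.Graph.Neuron.FullyConnected
    module Init = Owl_neural.S.Graph.Neuron.Init
    let () =
      let fc = FC.create ~inputs:16 8 Init.Standard in
      FC.connect [| 16 |] fc;   (* fixes in_shape / out_shape *)
      FC.init fc;               (* allocates w and b *)
      print_endline (FC.to_string fc)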
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/GRU/index.html b/owl/Owl_neural/S/Graph/Neuron/GRU/index.html deleted file mode 100644 index 862bce24b..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/GRU/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GRU (owl.Owl_neural.S.Graph.Neuron.GRU)

Module Neuron.GRU

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.GRU.neuron_typ = {
  1. mutable wxz : Optimise.Algodiff.t;
  2. mutable whz : Optimise.Algodiff.t;
  3. mutable wxr : Optimise.Algodiff.t;
  4. mutable whr : Optimise.Algodiff.t;
  5. mutable wxh : Optimise.Algodiff.t;
  6. mutable whh : Optimise.Algodiff.t;
  7. mutable bz : Optimise.Algodiff.t;
  8. mutable br : Optimise.Algodiff.t;
  9. mutable bh : Optimise.Algodiff.t;
  10. mutable h : Optimise.Algodiff.t;
  11. mutable init_typ : Init.typ;
  12. mutable in_shape : int array;
  13. mutable out_shape : int array;
}
val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/GaussianDropout/index.html b/owl/Owl_neural/S/Graph/Neuron/GaussianDropout/index.html deleted file mode 100644 index f8e902491..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/GaussianDropout/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GaussianDropout (owl.Owl_neural.S.Graph.Neuron.GaussianDropout)

Module Neuron.GaussianDropout

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.GaussianDropout.neuron_typ = {
  1. mutable rate : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/GaussianNoise/index.html b/owl/Owl_neural/S/Graph/Neuron/GaussianNoise/index.html deleted file mode 100644 index 925d924fa..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/GaussianNoise/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GaussianNoise (owl.Owl_neural.S.Graph.Neuron.GaussianNoise)

Module Neuron.GaussianNoise

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.GaussianNoise.neuron_typ = {
  1. mutable sigma : float;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : float -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/GlobalAvgPool1D/index.html b/owl/Owl_neural/S/Graph/Neuron/GlobalAvgPool1D/index.html deleted file mode 100644 index fe637b4e7..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/GlobalAvgPool1D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GlobalAvgPool1D (owl.Owl_neural.S.Graph.Neuron.GlobalAvgPool1D)

Module Neuron.GlobalAvgPool1D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.GlobalAvgPool1D.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/GlobalAvgPool2D/index.html b/owl/Owl_neural/S/Graph/Neuron/GlobalAvgPool2D/index.html deleted file mode 100644 index f722aaa69..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/GlobalAvgPool2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GlobalAvgPool2D (owl.Owl_neural.S.Graph.Neuron.GlobalAvgPool2D)

Module Neuron.GlobalAvgPool2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.GlobalAvgPool2D.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/GlobalMaxPool1D/index.html b/owl/Owl_neural/S/Graph/Neuron/GlobalMaxPool1D/index.html deleted file mode 100644 index b28b18802..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/GlobalMaxPool1D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GlobalMaxPool1D (owl.Owl_neural.S.Graph.Neuron.GlobalMaxPool1D)

Module Neuron.GlobalMaxPool1D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.GlobalMaxPool1D.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/GlobalMaxPool2D/index.html b/owl/Owl_neural/S/Graph/Neuron/GlobalMaxPool2D/index.html deleted file mode 100644 index 665d6aedb..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/GlobalMaxPool2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -GlobalMaxPool2D (owl.Owl_neural.S.Graph.Neuron.GlobalMaxPool2D)

Module Neuron.GlobalMaxPool2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.GlobalMaxPool2D.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Init/index.html b/owl/Owl_neural/S/Graph/Neuron/Init/index.html deleted file mode 100644 index ce34efae5..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Init/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Init (owl.Owl_neural.S.Graph.Neuron.Init)

Module Neuron.Init

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Init.typ =
  1. | Uniform of float * float
  2. | Gaussian of float * float
  3. | Standard
  4. | Tanh
  5. | GlorotNormal
  6. | GlorotUniform
  7. | LecunNormal
  8. | HeNormal
  9. | Custom of int array -> Optimise.Algodiff.t
val calc_fans : int array -> float * float
val run : typ -> int array -> Optimise.Algodiff.t -> Optimise.Algodiff.t
val to_string : typ -> string
val to_name : unit -> string
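A small sketch of invoking an initialiser directly through Init.run (assuming owl is installed; the exact role of the final Algodiff argument is an assumption, here it simply supplies a value of the target shape):

    module Init = Owl_neural.S.Graph.Neuron.Init
    module AD = Owl_neural.S.Graph.Neuron.Optimise.Algodiff
    let () =
      let w0 = AD.Mat.zeros 4 4 in
      let w = Init.run Init.GlorotUniform [| 4; 4 |] w0 in
      ignore w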
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Input/index.html b/owl/Owl_neural/S/Graph/Neuron/Input/index.html deleted file mode 100644 index d3c300540..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Input/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Input (owl.Owl_neural.S.Graph.Neuron.Input)

Module Neuron.Input

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Input.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : int array -> neuron_typ
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/LSTM/index.html b/owl/Owl_neural/S/Graph/Neuron/LSTM/index.html deleted file mode 100644 index c209ce20f..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/LSTM/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -LSTM (owl.Owl_neural.S.Graph.Neuron.LSTM)

Module Neuron.LSTM

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.LSTM.neuron_typ = {
  1. mutable wxi : Optimise.Algodiff.t;
  2. mutable whi : Optimise.Algodiff.t;
  3. mutable wxc : Optimise.Algodiff.t;
  4. mutable whc : Optimise.Algodiff.t;
  5. mutable wxf : Optimise.Algodiff.t;
  6. mutable whf : Optimise.Algodiff.t;
  7. mutable wxo : Optimise.Algodiff.t;
  8. mutable who : Optimise.Algodiff.t;
  9. mutable bi : Optimise.Algodiff.t;
  10. mutable bc : Optimise.Algodiff.t;
  11. mutable bf : Optimise.Algodiff.t;
  12. mutable bo : Optimise.Algodiff.t;
  13. mutable c : Optimise.Algodiff.t;
  14. mutable h : Optimise.Algodiff.t;
  15. mutable init_typ : Init.typ;
  16. mutable in_shape : int array;
  17. mutable out_shape : int array;
}
val create : ?time_steps:int -> ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
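A hedged sketch (assuming owl is installed; the in_shape layout [| time_steps; features |] is an assumption): an LSTM cell with 64 hidden units fed 10-step sequences of 32 features.

    module L = Owl_neural.S.Graph.Neuron.LSTM
    module Init = Owl_neural.S.Graph.Neuron.Init
    let () =
      let cell = L.create ~time_steps:10 ~inputs:32 64 Init.Tanh in
      L.connect [| 10; 32 |] cell;
      L.init cell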
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Lambda/index.html b/owl/Owl_neural/S/Graph/Neuron/Lambda/index.html deleted file mode 100644 index 6daf56337..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Lambda/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Lambda (owl.Owl_neural.S.Graph.Neuron.Lambda)

Module Neuron.Lambda

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Lambda.neuron_typ = {
  1. mutable lambda : Optimise.Algodiff.t -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : ?out_shape:int array -> (Optimise.Algodiff.t -> Optimise.Algodiff.t) -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
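A minimal sketch (assuming owl is installed): a Lambda neuron wrapping an arbitrary Algodiff function, here an element-wise square.

    module Lam = Owl_neural.S.Graph.Neuron.Lambda
    module AD = Owl_neural.S.Graph.Neuron.Optimise.Algodiff
    let () =
      let sqr = Lam.create (fun x -> AD.Maths.(x * x)) in
      Lam.connect [| 10 |] sqr;
      print_endline (Lam.to_string sqr)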
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/LambdaArray/index.html b/owl/Owl_neural/S/Graph/Neuron/LambdaArray/index.html deleted file mode 100644 index 8a758389c..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/LambdaArray/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -LambdaArray (owl.Owl_neural.S.Graph.Neuron.LambdaArray)

Module Neuron.LambdaArray

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.LambdaArray.neuron_typ = {
  1. mutable lambda : Optimise.Algodiff.t array -> Optimise.Algodiff.t;
  2. mutable in_shape : int array;
  3. mutable out_shape : int array;
}
val create : int array -> (Optimise.Algodiff.t array -> Optimise.Algodiff.t) -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Linear/index.html b/owl/Owl_neural/S/Graph/Neuron/Linear/index.html deleted file mode 100644 index 8fcaf3f06..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Linear/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Linear (owl.Owl_neural.S.Graph.Neuron.Linear)

Module Neuron.Linear

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Linear.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable b : Optimise.Algodiff.t;
  3. mutable init_typ : Init.typ;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/LinearNoBias/index.html b/owl/Owl_neural/S/Graph/Neuron/LinearNoBias/index.html deleted file mode 100644 index 764d08e63..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/LinearNoBias/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -LinearNoBias (owl.Owl_neural.S.Graph.Neuron.LinearNoBias)

Module Neuron.LinearNoBias

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.LinearNoBias.neuron_typ = {
  1. mutable w : Optimise.Algodiff.t;
  2. mutable init_typ : Init.typ;
  3. mutable in_shape : int array;
  4. mutable out_shape : int array;
}
val create : ?inputs:int -> int -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Masking/index.html b/owl/Owl_neural/S/Graph/Neuron/Masking/index.html deleted file mode 100644 index 240e34464..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Masking/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Masking (owl.Owl_neural.S.Graph.Neuron.Masking)

Module Neuron.Masking

\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Max/index.html b/owl/Owl_neural/S/Graph/Neuron/Max/index.html deleted file mode 100644 index f87afdaee..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Max/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Max (owl.Owl_neural.S.Graph.Neuron.Max)

Module Neuron.Max

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Max.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/MaxPool1D/index.html b/owl/Owl_neural/S/Graph/Neuron/MaxPool1D/index.html deleted file mode 100644 index e0c426e2e..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/MaxPool1D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -MaxPool1D (owl.Owl_neural.S.Graph.Neuron.MaxPool1D)

Module Neuron.MaxPool1D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.MaxPool1D.neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/MaxPool2D/index.html b/owl/Owl_neural/S/Graph/Neuron/MaxPool2D/index.html deleted file mode 100644 index 8174f677d..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/MaxPool2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -MaxPool2D (owl.Owl_neural.S.Graph.Neuron.MaxPool2D)

Module Neuron.MaxPool2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.MaxPool2D.neuron_typ = {
  1. mutable padding : Owl_types.padding;
  2. mutable kernel : int array;
  3. mutable stride : int array;
  4. mutable in_shape : int array;
  5. mutable out_shape : int array;
}
val create : Owl_types.padding -> int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Mul/index.html b/owl/Owl_neural/S/Graph/Neuron/Mul/index.html deleted file mode 100644 index a32cfe4c5..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Mul/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Mul (owl.Owl_neural.S.Graph.Neuron.Mul)

Module Neuron.Mul

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Mul.neuron_typ = {
  1. mutable in_shape : int array;
  2. mutable out_shape : int array;
}
val create : unit -> neuron_typ
val connect : int array array -> neuron_typ -> unit
val copy : 'a -> neuron_typ
val run : Optimise.Algodiff.t array -> 'a -> Optimise.Algodiff.t
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Normalisation/index.html b/owl/Owl_neural/S/Graph/Neuron/Normalisation/index.html deleted file mode 100644 index 8398d273f..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Normalisation/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -Normalisation (owl.Owl_neural.S.Graph.Neuron.Normalisation)

Module Neuron.Normalisation

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Normalisation.neuron_typ = {
  1. mutable axis : int;
  2. mutable beta : Optimise.Algodiff.t;
  3. mutable gamma : Optimise.Algodiff.t;
  4. mutable mu : Optimise.Algodiff.t;
  5. mutable var : Optimise.Algodiff.t;
  6. mutable decay : Optimise.Algodiff.t;
  7. mutable training : bool;
  8. mutable in_shape : int array;
  9. mutable out_shape : int array;
}
val create : ?training:bool -> ?decay:float -> ?mu:Optimise.Algodiff.A.arr -> ?var:Optimise.Algodiff.A.arr -> int -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val load_weights : neuron_typ -> Optimise.Algodiff.t array -> unit
val save_weights : neuron_typ -> Optimise.Algodiff.t array
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
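A hedged sketch (assuming owl is installed; the final int argument being the normalisation axis, and the -1 "last axis" convention, are assumptions): batch normalisation with a slower running-average decay.

    module BN = Owl_neural.S.Graph.Neuron.Normalisation
    let () =
      let bn = BN.create ~training:true ~decay:0.99 (-1) in
      BN.connect [| 32 |] bn;
      BN.init bn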
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/A/Linalg/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/A/Linalg/index.html deleted file mode 100644 index ee2dad58e..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_neural.S.Graph.Neuron.Optimise.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/A/Mat/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/A/Mat/index.html deleted file mode 100644 index a6f50e5af..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_neural.S.Graph.Neuron.Optimise.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/A/Scalar/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/A/Scalar/index.html deleted file mode 100644 index 2f9682b19..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl.Owl_neural.S.Graph.Neuron.Optimise.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/A/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/A/index.html deleted file mode 100644 index 0d013b37f..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/A/index.html +++ /dev/null @@ -1,160 +0,0 @@ - -A (owl.Owl_neural.S.Graph.Neuron.Optimise.Algodiff.A)

Module Algodiff.A

val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
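A short sketch (assuming owl is installed) of using A directly: it is the plain ndarray module this Algodiff instance is built over, so no differentiation is involved at this level.

    module A = Owl_neural.S.Graph.Neuron.Optimise.Algodiff.A
    let () =
      let x = A.gaussian [| 2; 3 |] in
      let y = A.add (A.sqr x) (A.ones [| 2; 3 |]) in
      Printf.printf "sum = %g\n" (A.elt_to_float (A.sum' y))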
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Arr/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Arr/index.html deleted file mode 100644 index b62894ae4..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl.Owl_neural.S.Graph.Neuron.Optimise.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/index.html deleted file mode 100644 index 6e23516fe..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl.Owl_neural.S.Graph.Neuron.Optimise.Algodiff.Builder)

Module Algodiff.Builder

module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t
module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t
module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t
module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array
module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t
module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t
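A hedged sketch of extending Algodiff with a new unary operation via build_siso (assuming owl is installed; the cube operation and the argument order assumed for df and dr are illustrative, not taken from this page):

    module AD = Owl_neural.S.Graph.Neuron.Optimise.Algodiff
    let cube =
      AD.Builder.build_siso
        (module struct
          let label = "cube"
          let ff_f a = AD.F (AD.A.Scalar.(mul a (mul a a)))
          let ff_arr a = AD.Arr (AD.A.(mul a (mul a a)))
          (* forward: d(x^3) = 3 x^2 dx ; reverse: propagate 3 x^2 * adjoint *)
          let df _cp ap at = AD.Maths.(at * (AD.pack_flt 3. * ap * ap))
          let dr a _cp ca = AD.Maths.(!ca * (AD.pack_flt 3. * a * a))
        end : AD.Builder.Siso)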
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html deleted file mode 100644 index 757a730f2..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl.Owl_neural.S.Graph.Neuron.Optimise.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html deleted file mode 100644 index 97941ff89..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl.Owl_neural.S.Graph.Neuron.Optimise.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html deleted file mode 100644 index 76b3234e1..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl.Owl_neural.S.Graph.Neuron.Optimise.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html deleted file mode 100644 index 82f457289..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl.Owl_neural.S.Graph.Neuron.Optimise.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html deleted file mode 100644 index 8d772a393..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl.Owl_neural.S.Graph.Neuron.Optimise.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html deleted file mode 100644 index 4d16931aa..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl.Owl_neural.S.Graph.Neuron.Optimise.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Linalg/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Linalg/index.html deleted file mode 100644 index 426fda061..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_neural.S.Graph.Neuron.Optimise.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t
val logdet : t -> t
val chol : ?upper:bool -> t -> t
val qr : t -> t * t
val lq : t -> t * t
val svd : ?thin:bool -> t -> t * t * t
val sylvester : t -> t -> t -> t
val lyapunov : t -> t -> t
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> t -> t -> t
val (/@) : t -> t -> t
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> t -> t -> t
val care : ?diag_r:bool -> t -> t -> t -> t -> t
val dare : ?diag_r:bool -> t -> t -> t -> t -> t
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Mat/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Mat/index.html deleted file mode 100644 index 949b209d4..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_neural.S.Graph.Neuron.Optimise.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Maths/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Maths/index.html deleted file mode 100644 index 886f5186f..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl.Owl_neural.S.Graph.Neuron.Optimise.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t
val (-) : t -> t -> t
val (*) : t -> t -> t
val (/) : t -> t -> t
val (*@) : t -> t -> t
val (**) : t -> t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val kron : t -> t -> t
val dot : t -> t -> t
val pow : t -> t -> t
val atan2 : t -> t -> t
val min2 : t -> t -> t
val max2 : t -> t -> t
val cross_entropy : t -> t -> t
val inv : t -> t
val neg : t -> t
val abs : t -> t
val signum : t -> t
val floor : t -> t
val ceil : t -> t
val round : t -> t
val sqr : t -> t
val sqrt : t -> t
val log : t -> t
val log2 : t -> t
val log10 : t -> t
val exp : t -> t
val sin : t -> t
val cos : t -> t
val tan : t -> t
val sinh : t -> t
val cosh : t -> t
val tanh : t -> t
val asin : t -> t
val acos : t -> t
val atan : t -> t
val asinh : t -> t
val acosh : t -> t
val atanh : t -> t
val sum' : t -> t
val log_sum_exp' : t -> t
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t
val sum : ?axis:int -> ?keep_dims:bool -> t -> t
val sum_reduce : ?axis:int array -> t -> t
val mean : t -> t
val transpose : ?axis:int array -> t -> t
val swap : int -> int -> t -> t
val l1norm' : t -> t
val l2norm' : t -> t
val l2norm_sqr' : t -> t
val sigmoid : t -> t
val relu : t -> t
val dawsn : t -> t
val softplus : t -> t
val softsign : t -> t
val softmax : ?axis:int -> t -> t
val reshape : t -> int array -> t
val flatten : t -> t
val get_item : t -> int -> int -> t
val get_row : t -> int -> t
val concat : axis:int -> t -> t -> t
val split : axis:int -> int array -> t -> t array
val of_arrays : t array array -> t
val to_arrays : t -> t array array
val concatenate : axis:int -> t array -> t
val stack : axis:int -> t array -> t
val get_slice : int list list -> t -> t
val set_slice : int list list -> t -> t -> t
val get_fancy : Owl_types.index list -> t -> t
val set_fancy : Owl_types.index list -> t -> t -> t
val diag : ?k:int -> t -> t
val diagm : ?k:int -> t -> t
val trace : t -> t
val triu : ?k:int -> t -> t
val tril : ?k:int -> t -> t
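A quick sketch (assuming owl is installed): Maths overloads the usual arithmetic for Algodiff values, with ( *@ ) as matrix multiplication, so differentiable expressions read like ordinary math.

    module AD = Owl_neural.S.Graph.Neuron.Optimise.Algodiff
    let () =
      let x = AD.Mat.uniform 3 3 in
      let y = AD.Maths.(sigmoid (x *@ x) + AD.pack_flt 1.) in
      ignore (AD.Maths.sum' y)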
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/NN/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/NN/index.html deleted file mode 100644 index 768259366..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl.Owl_neural.S.Graph.Neuron.Optimise.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t
val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val upsampling2d : t -> int array -> t
val pad : ?v:A.elt -> int list list -> t -> t
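Because these NN operations consume and return values of type t, a forward pass written with them differentiates like any other expression. A hedged sketch, assuming the Owl.Algodiff.S alias and Owl's batch-first input layout (both assumptions):
(* sketch only: conv2d takes input, kernel, stride; pooling takes padding, input, kernel, stride *)
module AD = Owl.Algodiff.S
let x = AD.pack_arr (Owl.Arr.uniform [| 1; 28; 28; 1 |])   (* one 28x28 single-channel image *)
let w = AD.pack_arr (Owl.Arr.uniform [| 3; 3; 1; 8 |])     (* 3x3 kernel, 1 -> 8 channels *)
let y = AD.NN.conv2d ~padding:Owl_types.SAME x w [| 1; 1 |]
let z = AD.NN.max_pool2d Owl_types.VALID y [| 2; 2 |] [| 2; 2 |]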
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/index.html deleted file mode 100644 index 7f3c5d962..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Algodiff/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Algodiff (owl.Owl_neural.S.Graph.Neuron.Optimise.Algodiff)

Module Optimise.Algodiff

module A : sig ... end
type t = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Optimise.Algodiff.t =
  | F of A.elt
  | Arr of A.arr
  | DF of t * t * int
  | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
val tag : unit -> int
val primal : t -> t
val primal' : t -> t
val zero : t -> t
val reset_zero : t -> t
val tangent : t -> t
val adjref : t -> t Stdlib.ref
val adjval : t -> t
val shape : t -> int array
val is_float : t -> bool
val is_arr : t -> bool
val row_num : t -> int
val col_num : t -> int
val numel : t -> int
val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t
val clip_by_l2norm : A.elt -> t -> t
val copy_primal' : t -> t
val tile : t -> int array -> t
val repeat : t -> int array -> t
val pack_elt : A.elt -> t
val unpack_elt : t -> A.elt
val pack_flt : float -> t
val _f : float -> t
val unpack_flt : t -> float
val pack_arr : A.arr -> t
val unpack_arr : t -> A.arr
val deep_info : t -> string
val type_info : t -> string
val error_binop : string -> t -> t -> 'a
val error_uniop : string -> t -> 'a
val make_forward : t -> t -> int -> t
val make_reverse : t -> int -> t
val reverse_prop : t -> t -> unit
val diff : (t -> t) -> t -> t
val diff' : (t -> t) -> t -> t * t
val grad : (t -> t) -> t -> t
val grad' : (t -> t) -> t -> t * t
val jacobian : (t -> t) -> t -> t
val jacobian' : (t -> t) -> t -> t * t
val jacobianv : (t -> t) -> t -> t -> t
val jacobianv' : (t -> t) -> t -> t -> t * t
val jacobianTv : (t -> t) -> t -> t -> t
val jacobianTv' : (t -> t) -> t -> t -> t * t
val hessian : (t -> t) -> t -> t
val hessian' : (t -> t) -> t -> t * t
val hessianv : (t -> t) -> t -> t -> t
val hessianv' : (t -> t) -> t -> t -> t * t
val laplacian : (t -> t) -> t -> t
val laplacian' : (t -> t) -> t -> t * t
val gradhessian : (t -> t) -> t -> t * t
val gradhessian' : (t -> t) -> t -> t * t * t
val gradhessianv : (t -> t) -> t -> t -> t * t
val gradhessianv' : (t -> t) -> t -> t -> t * t * t
module Builder : sig ... end
module Maths : sig ... end
module Linalg : sig ... end
module NN : sig ... end
module Mat : sig ... end
module Arr : sig ... end
val to_trace : t list -> string
val to_dot : t list -> string
val pp_num : Stdlib.Format.formatter -> t -> unit
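The diff/grad family above is the core of this module: each takes an OCaml function over t and evaluates its derivative at a point. A minimal sketch, assuming the conventional alias Owl.Algodiff.S for this instantiation:
module AD = Owl.Algodiff.S
(* scalar derivative: f x = x^3, so f' 2. = 12. *)
let f x = AD.Maths.(x * x * x)
let f' = AD.(diff f (pack_flt 2.) |> unpack_flt)
(* gradient of a scalar-valued function over a row vector *)
let g x = AD.Maths.(sum' (sqr x))
let gx = AD.(grad g (pack_arr (Owl.Mat.uniform 1 5)) |> unpack_arr)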
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Batch/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Batch/index.html deleted file mode 100644 index 5c6035531..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Batch/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Batch (owl.Owl_neural.S.Graph.Neuron.Optimise.Batch)

Module Optimise.Batch

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Optimise.Batch.typ =
  | Full
  | Mini of int
  | Sample of int
  | Stochastic
val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val batches : typ -> Algodiff.t -> int
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Checkpoint/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Checkpoint/index.html deleted file mode 100644 index 3bc242951..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Checkpoint/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Checkpoint (owl.Owl_neural.S.Graph.Neuron.Optimise.Checkpoint)

Module Optimise.Checkpoint

type state = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Optimise.Checkpoint.state = {
  mutable current_batch : int;
  mutable batches_per_epoch : int;
  mutable epochs : float;
  mutable batches : int;
  mutable loss : Algodiff.t array;
  mutable start_at : float;
  mutable stop : bool;
  mutable gs : Algodiff.t array array;
  mutable ps : Algodiff.t array array;
  mutable us : Algodiff.t array array;
  mutable ch : Algodiff.t array array array;
}
type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Optimise.Checkpoint.typ =
  | Batch of int
  | Epoch of float
  | Custom of (state -> unit)
  | None
val init_state : int -> float -> state
val default_checkpoint_fun : (string -> 'a) -> 'a
val print_state_info : state -> unit
val print_summary : state -> unit
val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Clipping/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Clipping/index.html deleted file mode 100644 index fd0a4c65b..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Clipping/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Clipping (owl.Owl_neural.S.Graph.Neuron.Optimise.Clipping)

Module Optimise.Clipping

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Optimise.Clipping.typ =
  | L2norm of float
  | Value of float * float
  | None
val run : typ -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Gradient/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Gradient/index.html deleted file mode 100644 index 43d5c5058..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Gradient/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Gradient (owl.Owl_neural.S.Graph.Neuron.Optimise.Gradient)

Module Optimise.Gradient

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Optimise.Gradient.typ =
  | GD
  | CG
  | CD
  | NonlinearCG
  | DaiYuanCG
  | NewtonCG
  | Newton
val run : typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Learning_Rate/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Learning_Rate/index.html deleted file mode 100644 index 044e80c3a..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Learning_Rate/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Learning_Rate (owl.Owl_neural.S.Graph.Neuron.Optimise.Learning_Rate)

Module Optimise.Learning_Rate

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Optimise.Learning_Rate.typ =
  | Adagrad of float
  | Const of float
  | Decay of float * float
  | Exp_decay of float * float
  | RMSprop of float * float
  | Adam of float * float * float
  | Schedule of float array
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t
val default : typ -> typ
val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Loss/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Loss/index.html deleted file mode 100644 index 0855e65b9..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Loss/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Loss (owl.Owl_neural.S.Graph.Neuron.Optimise.Loss)

Module Optimise.Loss

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Optimise.Loss.typ =
  | Hinge
  | L1norm
  | L2norm
  | Quadratic
  | Cross_entropy
  | Custom of (Algodiff.t -> Algodiff.t -> Algodiff.t)
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
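run applies the selected loss to two Algodiff values, and the Custom constructor lets any Algodiff expression serve as the loss. A hedged sketch, assuming an equivalent module is reachable as Owl.Optimise.S (an assumption about the library alias, not part of this listing):
module O = Owl.Optimise.S
module AD = Owl.Algodiff.S
(* mean absolute error as a custom loss: two Algodiff values in, scalar loss out *)
let mae = O.Loss.Custom (fun y y' -> AD.Maths.(sum' (abs (y - y'))))
let l = O.Loss.run mae (AD.pack_arr (Owl.Mat.ones 1 3)) (AD.pack_arr (Owl.Mat.zeros 1 3))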
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Momentum/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Momentum/index.html deleted file mode 100644 index cf6d96699..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Momentum/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Momentum (owl.Owl_neural.S.Graph.Neuron.Optimise.Momentum)

Module Optimise.Momentum

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Optimise.Momentum.typ =
  | Standard of float
  | Nesterov of float
  | None
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Params/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Params/index.html deleted file mode 100644 index e1cfcc8c4..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Params/index.html +++ /dev/null @@ -1,16 +0,0 @@ - -Params (owl.Owl_neural.S.Graph.Neuron.Optimise.Params)

Module Optimise.Params

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Optimise.Params.typ = {
  mutable epochs : float;
  mutable batch : Batch.typ;
  mutable gradient : Gradient.typ;
  mutable loss : Loss.typ;
  mutable learning_rate : Learning_Rate.typ;
  mutable regularisation : Regularisation.typ;
  mutable momentum : Momentum.typ;
  mutable clipping : Clipping.typ;
  mutable stopping : Stopping.typ;
  mutable checkpoint : Checkpoint.typ;
  mutable verbosity : bool;
}
val default : unit -> typ
val config :
  ?batch:Batch.typ ->
  ?gradient:Gradient.typ ->
  ?loss:Loss.typ ->
  ?learning_rate:Learning_Rate.typ ->
  ?regularisation:Regularisation.typ ->
  ?momentum:Momentum.typ ->
  ?clipping:Clipping.typ ->
  ?stopping:Stopping.typ ->
  ?checkpoint:Checkpoint.typ ->
  ?verbosity:bool ->
  float ->
  typ
val to_string : typ -> string
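config builds a Params.typ record in one call: every hyper-parameter is optional and the single positional float is the number of epochs. A hedged sketch, assuming the same module is reachable as Owl.Optimise.S.Params:
module O = Owl.Optimise.S
let params =
  O.Params.config
    ~batch:(O.Batch.Mini 128)
    ~learning_rate:(O.Learning_Rate.Adagrad 0.005)
    ~loss:O.Loss.Cross_entropy
    ~stopping:(O.Stopping.Const 1e-6)
    10.   (* epochs, the only positional argument *)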
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Regularisation/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Regularisation/index.html deleted file mode 100644 index 01752cf13..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Regularisation/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Regularisation (owl.Owl_neural.S.Graph.Neuron.Optimise.Regularisation)

Module Optimise.Regularisation

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Optimise.Regularisation.typ =
  | L1norm of float
  | L2norm of float
  | Elastic_net of float * float
  | None
val run : typ -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Stopping/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Stopping/index.html deleted file mode 100644 index 8181bb328..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Stopping/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Stopping (owl.Owl_neural.S.Graph.Neuron.Optimise.Stopping)

Module Optimise.Stopping

type typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Optimise.Stopping.typ =
  | Const of float
  | Early of int * int
  | None
val run : typ -> float -> bool
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/Utils/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/Utils/index.html deleted file mode 100644 index 4ce388f9f..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl.Owl_neural.S.Graph.Neuron.Optimise.Utils)

Module Optimise.Utils

val sample_num : Algodiff.t -> int
val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val get_chunk : Algodiff.t -> Algodiff.t -> int -> int -> Algodiff.t * Algodiff.t
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Optimise/index.html b/owl/Owl_neural/S/Graph/Neuron/Optimise/index.html deleted file mode 100644 index 3ff54d18a..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Optimise/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -Optimise (owl.Owl_neural.S.Graph.Neuron.Optimise)

Module Neuron.Optimise

module Algodiff : sig ... end
module Utils : sig ... end
module Learning_Rate : sig ... end
module Batch : sig ... end
module Loss : sig ... end
module Gradient : sig ... end
module Momentum : sig ... end
module Regularisation : sig ... end
module Clipping : sig ... end
module Stopping : sig ... end
module Checkpoint : sig ... end
module Params : sig ... end
val minimise_weight :
  ?state:Checkpoint.state ->
  Params.typ ->
  (Algodiff.t -> Algodiff.t -> Algodiff.t) ->
  Algodiff.t ->
  Algodiff.t ->
  Algodiff.t ->
  Checkpoint.state * Algodiff.t
val minimise_network :
  ?state:Checkpoint.state ->
  Params.typ ->
  (Algodiff.t -> Algodiff.t * Algodiff.t array array) ->
  (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) ->
  (Algodiff.t array array -> unit) ->
  (string -> unit) ->
  Algodiff.t ->
  Algodiff.t ->
  Checkpoint.state
val minimise_fun :
  ?state:Checkpoint.state ->
  Params.typ ->
  (Algodiff.t -> Algodiff.t) ->
  Algodiff.t ->
  Checkpoint.state * Algodiff.t
val minimise_compiled_network :
  ?state:Checkpoint.state ->
  Params.typ ->
  (Algodiff.t -> Algodiff.t -> Algodiff.t) ->
  (unit -> unit) ->
  (string -> unit) ->
  Algodiff.t ->
  Algodiff.t ->
  Checkpoint.state
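minimise_fun is the most direct entry point: it minimises an arbitrary scalar-valued Algodiff function from a starting point and returns the final checkpoint state together with the minimiser. A minimal sketch, again assuming the Owl.Optimise.S and Owl.Algodiff.S aliases:
module O = Owl.Optimise.S
module AD = Owl.Algodiff.S
(* minimise f(x) = sum((x - 1)^2) from a random starting vector *)
let f x = AD.Maths.(sum' (sqr (x - AD.pack_flt 1.)))
let x0 = AD.pack_arr (Owl.Mat.uniform 1 5)
let () =
  let p = O.Params.default () in
  p.O.Params.epochs <- 20.;
  let _state, xmin = O.minimise_fun p f x0 in
  AD.unpack_arr xmin |> Owl.Mat.print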
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Padding1D/index.html b/owl/Owl_neural/S/Graph/Neuron/Padding1D/index.html deleted file mode 100644 index 9a20b91d9..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Padding1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding1D (owl.Owl_neural.S.Graph.Neuron.Padding1D)

Module Neuron.Padding1D

\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Padding2D/index.html b/owl/Owl_neural/S/Graph/Neuron/Padding2D/index.html deleted file mode 100644 index f59022566..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Padding2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Padding2D (owl.Owl_neural.S.Graph.Neuron.Padding2D)

Module Neuron.Padding2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Padding2D.neuron_typ = {
  mutable padding : int array array;
  mutable in_shape : int array;
  mutable out_shape : int array;
}
val create : int array array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Padding3D/index.html b/owl/Owl_neural/S/Graph/Neuron/Padding3D/index.html deleted file mode 100644 index 6d7201bee..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Padding3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Padding3D (owl.Owl_neural.S.Graph.Neuron.Padding3D)

Module Neuron.Padding3D

\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Recurrent/index.html b/owl/Owl_neural/S/Graph/Neuron/Recurrent/index.html deleted file mode 100644 index 20962817f..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Recurrent/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Recurrent (owl.Owl_neural.S.Graph.Neuron.Recurrent)

Module Neuron.Recurrent

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Recurrent.neuron_typ = {
  mutable whh : Optimise.Algodiff.t;
  mutable wxh : Optimise.Algodiff.t;
  mutable why : Optimise.Algodiff.t;
  mutable bh : Optimise.Algodiff.t;
  mutable by : Optimise.Algodiff.t;
  mutable h : Optimise.Algodiff.t;
  mutable hiddens : int;
  mutable act : Activation.typ;
  mutable init_typ : Init.typ;
  mutable in_shape : int array;
  mutable out_shape : int array;
}
val create : ?time_steps:int -> ?inputs:int -> int -> int -> Activation.typ -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Reshape/index.html b/owl/Owl_neural/S/Graph/Neuron/Reshape/index.html deleted file mode 100644 index 681b87dd6..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Reshape/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Reshape (owl.Owl_neural.S.Graph.Neuron.Reshape)

Module Neuron.Reshape

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Reshape.neuron_typ = {
  mutable in_shape : int array;
  mutable out_shape : int array;
}
val create : ?inputs:int array -> int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/Slice/index.html b/owl/Owl_neural/S/Graph/Neuron/Slice/index.html deleted file mode 100644 index af480a686..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/Slice/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Slice (owl.Owl_neural.S.Graph.Neuron.Slice)

Module Neuron.Slice

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.Slice.neuron_typ = {
  mutable in_shape : int array;
  mutable out_shape : int array;
  mutable slice : int list list;
}
val create : int list list -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/TransposeConv1D/index.html b/owl/Owl_neural/S/Graph/Neuron/TransposeConv1D/index.html deleted file mode 100644 index c9b7a0349..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/TransposeConv1D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -TransposeConv1D (owl.Owl_neural.S.Graph.Neuron.TransposeConv1D)

Module Neuron.TransposeConv1D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.TransposeConv1D.neuron_typ = {
  mutable w : Optimise.Algodiff.t;
  mutable b : Optimise.Algodiff.t;
  mutable kernel : int array;
  mutable stride : int array;
  mutable padding : Owl_types.padding;
  mutable init_typ : Init.typ;
  mutable in_shape : int array;
  mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/TransposeConv2D/index.html b/owl/Owl_neural/S/Graph/Neuron/TransposeConv2D/index.html deleted file mode 100644 index df1cefe39..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/TransposeConv2D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -TransposeConv2D (owl.Owl_neural.S.Graph.Neuron.TransposeConv2D)

Module Neuron.TransposeConv2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.TransposeConv2D.neuron_typ = {
  mutable w : Optimise.Algodiff.t;
  mutable b : Optimise.Algodiff.t;
  mutable kernel : int array;
  mutable stride : int array;
  mutable padding : Owl_types.padding;
  mutable init_typ : Init.typ;
  mutable in_shape : int array;
  mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/TransposeConv3D/index.html b/owl/Owl_neural/S/Graph/Neuron/TransposeConv3D/index.html deleted file mode 100644 index cd5022b80..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/TransposeConv3D/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -TransposeConv3D (owl.Owl_neural.S.Graph.Neuron.TransposeConv3D)

Module Neuron.TransposeConv3D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.TransposeConv3D.neuron_typ = {
  mutable w : Optimise.Algodiff.t;
  mutable b : Optimise.Algodiff.t;
  mutable kernel : int array;
  mutable stride : int array;
  mutable padding : Owl_types.padding;
  mutable init_typ : Init.typ;
  mutable in_shape : int array;
  mutable out_shape : int array;
}
val create : ?inputs:int array -> Owl_types.padding -> int array -> int array -> Init.typ -> neuron_typ
val connect : int array -> neuron_typ -> unit
val init : neuron_typ -> unit
val reset : neuron_typ -> unit
val mktag : int -> neuron_typ -> unit
val mkpar : neuron_typ -> Optimise.Algodiff.t array
val mkpri : neuron_typ -> Optimise.Algodiff.t array
val mkadj : neuron_typ -> Optimise.Algodiff.t array
val update : neuron_typ -> Optimise.Algodiff.t array -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/UpSampling1D/index.html b/owl/Owl_neural/S/Graph/Neuron/UpSampling1D/index.html deleted file mode 100644 index 19a8db73d..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/UpSampling1D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling1D (owl.Owl_neural.S.Graph.Neuron.UpSampling1D)

Module Neuron.UpSampling1D

\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/UpSampling2D/index.html b/owl/Owl_neural/S/Graph/Neuron/UpSampling2D/index.html deleted file mode 100644 index 3f625df19..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/UpSampling2D/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -UpSampling2D (owl.Owl_neural.S.Graph.Neuron.UpSampling2D)

Module Neuron.UpSampling2D

type neuron_typ = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.UpSampling2D.neuron_typ = {
  mutable size : int array;
  mutable in_shape : int array;
  mutable out_shape : int array;
}
val create : int array -> neuron_typ
val connect : int array -> neuron_typ -> unit
val copy : neuron_typ -> neuron_typ
val to_string : neuron_typ -> string
val to_name : unit -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/UpSampling3D/index.html b/owl/Owl_neural/S/Graph/Neuron/UpSampling3D/index.html deleted file mode 100644 index 7a7f083d5..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/UpSampling3D/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -UpSampling3D (owl.Owl_neural.S.Graph.Neuron.UpSampling3D)

Module Neuron.UpSampling3D

\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/Neuron/index.html b/owl/Owl_neural/S/Graph/Neuron/index.html deleted file mode 100644 index 7a49e36be..000000000 --- a/owl/Owl_neural/S/Graph/Neuron/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Neuron (owl.Owl_neural.S.Graph.Neuron)

Module Graph.Neuron

module Optimise : sig ... end
module Init : sig ... end
module Input : sig ... end
module Activation : sig ... end
module Linear : sig ... end
module LinearNoBias : sig ... end
module Recurrent : sig ... end
module LSTM : sig ... end
module GRU : sig ... end
module Conv1D : sig ... end
module Conv2D : sig ... end
module Conv3D : sig ... end
module DilatedConv1D : sig ... end
module DilatedConv2D : sig ... end
module DilatedConv3D : sig ... end
module TransposeConv1D : sig ... end
module TransposeConv2D : sig ... end
module TransposeConv3D : sig ... end
module FullyConnected : sig ... end
module MaxPool1D : sig ... end
module MaxPool2D : sig ... end
module AvgPool1D : sig ... end
module AvgPool2D : sig ... end
module GlobalMaxPool1D : sig ... end
module GlobalMaxPool2D : sig ... end
module GlobalAvgPool1D : sig ... end
module GlobalAvgPool2D : sig ... end
module UpSampling1D : sig ... end
module UpSampling2D : sig ... end
module UpSampling3D : sig ... end
module Padding1D : sig ... end
module Padding2D : sig ... end
module Padding3D : sig ... end
module Lambda : sig ... end
module LambdaArray : sig ... end
module Dropout : sig ... end
module Reshape : sig ... end
module Flatten : sig ... end
module Slice : sig ... end
module Add : sig ... end
module Mul : sig ... end
module Dot : sig ... end
module Max : sig ... end
module Average : sig ... end
module Concatenate : sig ... end
module Normalisation : sig ... end
module GaussianNoise : sig ... end
module GaussianDropout : sig ... end
module AlphaDropout : sig ... end
module Embedding : sig ... end
module Masking : sig ... end
type neuron = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).Neuron.neuron =
  | Input of Input.neuron_typ
  | Linear of Linear.neuron_typ
  | LinearNoBias of LinearNoBias.neuron_typ
  | Embedding of Embedding.neuron_typ
  | LSTM of LSTM.neuron_typ
  | GRU of GRU.neuron_typ
  | Recurrent of Recurrent.neuron_typ
  | Conv1D of Conv1D.neuron_typ
  | Conv2D of Conv2D.neuron_typ
  | Conv3D of Conv3D.neuron_typ
  | DilatedConv1D of DilatedConv1D.neuron_typ
  | DilatedConv2D of DilatedConv2D.neuron_typ
  | DilatedConv3D of DilatedConv3D.neuron_typ
  | TransposeConv1D of TransposeConv1D.neuron_typ
  | TransposeConv2D of TransposeConv2D.neuron_typ
  | TransposeConv3D of TransposeConv3D.neuron_typ
  | FullyConnected of FullyConnected.neuron_typ
  | MaxPool1D of MaxPool1D.neuron_typ
  | MaxPool2D of MaxPool2D.neuron_typ
  | AvgPool1D of AvgPool1D.neuron_typ
  | AvgPool2D of AvgPool2D.neuron_typ
  | GlobalMaxPool1D of GlobalMaxPool1D.neuron_typ
  | GlobalMaxPool2D of GlobalMaxPool2D.neuron_typ
  | GlobalAvgPool1D of GlobalAvgPool1D.neuron_typ
  | GlobalAvgPool2D of GlobalAvgPool2D.neuron_typ
  | UpSampling2D of UpSampling2D.neuron_typ
  | Padding2D of Padding2D.neuron_typ
  | Dropout of Dropout.neuron_typ
  | Reshape of Reshape.neuron_typ
  | Flatten of Flatten.neuron_typ
  | Slice of Slice.neuron_typ
  | Lambda of Lambda.neuron_typ
  | LambdaArray of LambdaArray.neuron_typ
  | Activation of Activation.neuron_typ
  | GaussianNoise of GaussianNoise.neuron_typ
  | GaussianDropout of GaussianDropout.neuron_typ
  | AlphaDropout of AlphaDropout.neuron_typ
  | Normalisation of Normalisation.neuron_typ
  | Add of Add.neuron_typ
  | Mul of Mul.neuron_typ
  | Dot of Dot.neuron_typ
  | Max of Max.neuron_typ
  | Average of Average.neuron_typ
  | Concatenate of Concatenate.neuron_typ
val get_in_out_shape : neuron -> int array * int array
val get_in_shape : neuron -> int array
val get_out_shape : neuron -> int array
val connect : int array array -> neuron -> unit
val init : neuron -> unit
val reset : neuron -> unit
val mktag : int -> neuron -> unit
val mkpar : neuron -> Optimise.Algodiff.t array
val mkpri : neuron -> Optimise.Algodiff.t array
val mkadj : neuron -> Optimise.Algodiff.t array
val update : neuron -> Optimise.Algodiff.t array -> unit
val load_weights : neuron -> Optimise.Algodiff.t array -> unit
val save_weights : neuron -> Optimise.Algodiff.t array
val copy : neuron -> neuron
val to_string : neuron -> string
val to_name : neuron -> string
\ No newline at end of file diff --git a/owl/Owl_neural/S/Graph/index.html b/owl/Owl_neural/S/Graph/index.html deleted file mode 100644 index 1139fce8f..000000000 --- a/owl/Owl_neural/S/Graph/index.html +++ /dev/null @@ -1,245 +0,0 @@ - -Graph (owl.Owl_neural.S.Graph)

Module S.Graph

module Neuron : sig ... end
type node = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).node = {
  mutable name : string;
  mutable prev : node array;
  mutable next : node array;
  mutable neuron : Neuron.neuron;
  mutable output : Neuron.Optimise.Algodiff.t option;
  mutable network : network;
  mutable train : bool;
}
and network = Owl_neural_generic.Make_Embedded(Owl_algodiff_primal_ops.S).network = {
  mutable nnid : string;
  mutable size : int;
  mutable roots : node array;
  mutable outputs : node array;
  mutable topo : node array;
}
val make_network : ?nnid:string -> int -> node array -> node array -> network
val make_node : ?name:string -> ?train:bool -> node array -> node array -> Neuron.neuron -> Neuron.Optimise.Algodiff.t option -> network -> node
val get_roots : network -> node array
val get_outputs : network -> node array
val get_node : network -> string -> node
val get_network : ?name:string -> node -> network
val outputs : ?name:string -> node array -> network
val get_network_name : network -> string
val set_network_name : network -> string -> unit
val collect_output : node array -> Neuron.Optimise.Algodiff.t array
val connect_pair : node -> node -> unit
val connect_to_parents : node array -> node -> unit
val add_node : ?act_typ:Neuron.Activation.typ -> network -> node array -> node -> node
val input_shape : network -> int array
val input_shapes : network -> int array array
val init : network -> unit
val reset : network -> unit
val mktag : int -> network -> unit
val mkpar : network -> Neuron.Optimise.Algodiff.t array array
val mkpri : network -> Neuron.Optimise.Algodiff.t array array
val mkadj : network -> Neuron.Optimise.Algodiff.t array array
val update : network -> Neuron.Optimise.Algodiff.t array array -> unit
val run_inputs : Neuron.Optimise.Algodiff.t array -> network -> Neuron.Optimise.Algodiff.t array
val forward_inputs : network -> Neuron.Optimise.Algodiff.t array -> Neuron.Optimise.Algodiff.t array * Neuron.Optimise.Algodiff.t array array
val backward : network -> Neuron.Optimise.Algodiff.t -> Neuron.Optimise.Algodiff.t array array * Neuron.Optimise.Algodiff.t array array
val copy : network -> network
val model_inputs : network -> Neuron.Optimise.Algodiff.A.arr array -> Neuron.Optimise.Algodiff.A.arr array
val input : ?name:string -> int array -> node
val inputs : ?names:string array -> int array array -> node array
val activation : ?name:string -> Neuron.Activation.typ -> node -> node
val linear : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> node -> node
val linear_nobias : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> node -> node
val embedding : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> int -> node -> node
val recurrent : ?name:string -> ?init_typ:Neuron.Init.typ -> act_typ:Neuron.Activation.typ -> int -> int -> node -> node
val lstm : ?name:string -> ?init_typ:Neuron.Init.typ -> int -> node -> node
val gru : ?name:string -> ?init_typ:Neuron.Init.typ -> int -> node -> node
val conv1d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val conv2d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val conv3d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val dilated_conv1d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> int array -> node -> node
val dilated_conv2d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> int array -> node -> node
val dilated_conv3d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> int array -> node -> node
val transpose_conv1d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val transpose_conv2d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val transpose_conv3d : ?name:string -> ?padding:Owl_types.padding -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val fully_connected : ?name:string -> ?init_typ:Neuron.Init.typ -> ?act_typ:Neuron.Activation.typ -> int -> node -> node
val max_pool1d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val max_pool2d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val avg_pool1d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val avg_pool2d : ?name:string -> ?padding:Owl_types.padding -> ?act_typ:Neuron.Activation.typ -> int array -> int array -> node -> node
val global_max_pool1d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val global_max_pool2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val global_avg_pool1d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val global_avg_pool2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> node -> node
val upsampling2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> int array -> node -> node
val padding2d : ?name:string -> ?act_typ:Neuron.Activation.typ -> int array array -> node -> node
val dropout : ?name:string -> float -> node -> node
val gaussian_noise : ?name:string -> float -> node -> node
val gaussian_dropout : ?name:string -> float -> node -> node
val alpha_dropout : ?name:string -> float -> node -> node
val normalisation : ?name:string -> ?axis:int -> ?training:bool -> ?decay:float -> ?mu:Neuron.Optimise.Algodiff.A.arr -> ?var:Neuron.Optimise.Algodiff.A.arr -> node -> node
val reshape : ?name:string -> int array -> node -> node
val flatten : ?name:string -> node -> node
val slice : ?name:string -> int list list -> node -> node
val lambda : ?name:string -> ?act_typ:Neuron.Activation.typ -> ?out_shape:int array -> (Neuron.Optimise.Algodiff.t -> Neuron.Optimise.Algodiff.t) -> node -> node
val lambda_array : ?name:string -> ?act_typ:Neuron.Activation.typ -> int array -> (Neuron.Optimise.Algodiff.t array -> Neuron.Optimise.Algodiff.t) -> node array -> node
val add : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val mul : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val dot : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val max : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val average : ?name:string -> ?act_typ:Neuron.Activation.typ -> node array -> node
val concatenate : ?name:string -> ?act_typ:Neuron.Activation.typ -> int -> node array -> node
val to_string : network -> string
val pp_network : Stdlib.Format.formatter -> network -> unit
val print : network -> unit
val save : ?unsafe:bool -> network -> string -> unit
val load : string -> network
val save_weights : network -> string -> unit
val load_weights : network -> string -> unit
val make_subnetwork : ?copy:bool -> ?make_inputs:string array -> network -> string array -> network
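Taken together, these combinators build a network by threading node values through layer constructors and sealing the result with get_network. A hedged sketch of a small classifier, assuming the conventional Owl.Neural.S.Graph alias for this module:
open Owl
open Neural.S
open Neural.S.Graph
(* 28x28x1 input -> conv -> pool -> flatten -> dense softmax over 10 classes *)
let network =
  input [| 28; 28; 1 |]
  |> conv2d [| 3; 3; 1; 32 |] [| 1; 1 |] ~act_typ:Activation.Relu
  |> max_pool2d [| 2; 2 |] [| 2; 2 |]
  |> flatten
  |> linear 10 ~act_typ:Activation.(Softmax 1)
  |> get_network
let () = print network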
\ No newline at end of file diff --git a/owl/Owl_neural/S/index.html b/owl/Owl_neural/S/index.html deleted file mode 100644 index b762cd557..000000000 --- a/owl/Owl_neural/S/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -S (owl.Owl_neural.S)

Module Owl_neural.S

include sig ... end
module Graph : sig ... end
module Optimise = Graph.Neuron.Optimise
module Init = Graph.Neuron.Init
module Activation = Graph.Neuron.Activation
module Regularisation = Graph.Neuron.Optimise.Regularisation
\ No newline at end of file diff --git a/owl/Owl_neural_parallel/.dummy b/owl/Owl_neural_parallel/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_neural_parallel/Make/argument-1-M/index.html b/owl/Owl_neural_parallel/Make/argument-1-M/index.html deleted file mode 100644 index ce999513d..000000000 --- a/owl/Owl_neural_parallel/Make/argument-1-M/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -M (owl.Owl_neural_parallel.Make.M)

Parameter Make.M

type network
val mkpar : network -> Owl_algodiff.S.t array array
val init : network -> unit
val update : network -> Owl_algodiff.S.t array array -> unit
val copy : network -> network
val train_generic : ?state:Owl_optimise.S.Checkpoint.state -> ?params:Owl_optimise.S.Params.typ -> ?init_model:bool -> network -> Owl_algodiff.S.t -> Owl_algodiff.S.t -> Owl_optimise.S.Checkpoint.state
\ No newline at end of file diff --git a/owl/Owl_neural_parallel/Make/argument-2-E/index.html b/owl/Owl_neural_parallel/Make/argument-2-E/index.html deleted file mode 100644 index 0a08139c6..000000000 --- a/owl/Owl_neural_parallel/Make/argument-2-E/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -E (owl.Owl_neural_parallel.Make.E)

Parameter Make.E

type param_context
type barrier =
  | ASP
  | BSP
  | SSP
  | PSP
val get : 'a -> 'b * int
val set : 'a -> 'b -> unit
val worker_num : unit -> int
val start : ?barrier:barrier -> string -> string -> unit
val register_barrier : (param_context Stdlib.ref -> int * string list) -> unit
val register_schedule : ('a list -> ('a * ('b * 'c) list) list) -> unit
val register_pull : (('a * 'b) list -> ('a * 'c) list) -> unit
val register_push : ('a -> ('b * 'c) list -> ('b * 'c) list) -> unit
val register_stop : (param_context Stdlib.ref -> bool) -> unit
\ No newline at end of file diff --git a/owl/Owl_neural_parallel/Make/index.html b/owl/Owl_neural_parallel/Make/index.html deleted file mode 100644 index 9b0138b99..000000000 --- a/owl/Owl_neural_parallel/Make/index.html +++ /dev/null @@ -1,22 +0,0 @@ - -Make (owl.Owl_neural_parallel.Make)

Module Owl_neural_parallel.Make

Parameters

module M : ModelSig
module E : EngineSig

Signature

type task = {
  mutable id : int;
  mutable state : Owl_optimise.S.Checkpoint.state option;
  mutable params : Owl_optimise.S.Params.typ;
  mutable model : M.network;
  mutable data_x : Owl_algodiff.S.t;
  mutable data_y : Owl_algodiff.S.t;
}
val make_task : int -> Owl_optimise.S.Params.typ -> M.network -> Owl_algodiff.S.t -> Owl_algodiff.S.t -> task
val delta_model : M.network -> M.network -> unit
val local_model : task -> 'a
val schedule : task -> 'a list -> ('b * (int * 'c) list) list
val pull : task -> ('a * M.network) list -> ('b * 'c) list
val push : task -> 'a -> ('b * M.network) list -> ('c * M.network) list
val stop : 'a -> 'b -> bool
val train_generic : ?params:Owl_optimise.S.Params.typ -> M.network -> Owl_algodiff.S.t -> Owl_algodiff.S.t -> string -> string -> unit
val train : ?params:Owl_optimise.S.Params.typ -> M.network -> Owl_algodiff.S.A.arr -> Owl_algodiff.S.A.arr -> string -> string -> unit
\ No newline at end of file diff --git a/owl/Owl_neural_parallel/module-type-EngineSig/index.html b/owl/Owl_neural_parallel/module-type-EngineSig/index.html deleted file mode 100644 index 1d1f80a78..000000000 --- a/owl/Owl_neural_parallel/module-type-EngineSig/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -EngineSig (owl.Owl_neural_parallel.EngineSig)

Module type Owl_neural_parallel.EngineSig

type param_context
type barrier =
  | ASP
  | BSP
  | SSP
  | PSP
val get : 'a -> 'b * int
val set : 'a -> 'b -> unit
val worker_num : unit -> int
val start : ?barrier:barrier -> string -> string -> unit
val register_barrier : (param_context Stdlib.ref -> int * string list) -> unit
val register_schedule : ('a list -> ('a * ('b * 'c) list) list) -> unit
val register_pull : (('a * 'b) list -> ('a * 'c) list) -> unit
val register_push : ('a -> ('b * 'c) list -> ('b * 'c) list) -> unit
val register_stop : (param_context Stdlib.ref -> bool) -> unit
\ No newline at end of file diff --git a/owl/Owl_neural_parallel/module-type-ModelSig/index.html b/owl/Owl_neural_parallel/module-type-ModelSig/index.html deleted file mode 100644 index 1112a072a..000000000 --- a/owl/Owl_neural_parallel/module-type-ModelSig/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -ModelSig (owl.Owl_neural_parallel.ModelSig)

Module type Owl_neural_parallel.ModelSig

type network
val mkpar : network -> Owl_algodiff.S.t array array
val init : network -> unit
val update : network -> Owl_algodiff.S.t array array -> unit
val copy : network -> network
val train_generic : ?state:Owl_optimise.S.Checkpoint.state -> ?params:Owl_optimise.S.Params.typ -> ?init_model:bool -> network -> Owl_algodiff.S.t -> Owl_algodiff.S.t -> Owl_optimise.S.Checkpoint.state
\ No newline at end of file diff --git a/owl/Owl_nlp/.dummy b/owl/Owl_nlp/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_nlp_corpus/.dummy b/owl/Owl_nlp_corpus/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_nlp_lda/.dummy b/owl/Owl_nlp_lda/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_nlp_similarity/.dummy b/owl/Owl_nlp_similarity/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_nlp_tfidf/.dummy b/owl/Owl_nlp_tfidf/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_nlp_utils/.dummy b/owl/Owl_nlp_utils/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_nlp_vocabulary/.dummy b/owl/Owl_nlp_vocabulary/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_optimise/.dummy b/owl/Owl_optimise/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_optimise/D/Algodiff/A/Linalg/index.html b/owl/Owl_optimise/D/Algodiff/A/Linalg/index.html deleted file mode 100644 index 2dad5e619..000000000 --- a/owl/Owl_optimise/D/Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_optimise.D.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Algodiff/A/Mat/index.html b/owl/Owl_optimise/D/Algodiff/A/Mat/index.html deleted file mode 100644 index 068a6d39e..000000000 --- a/owl/Owl_optimise/D/Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_optimise.D.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Algodiff/A/Scalar/index.html b/owl/Owl_optimise/D/Algodiff/A/Scalar/index.html deleted file mode 100644 index 21bc033b8..000000000 --- a/owl/Owl_optimise/D/Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl.Owl_optimise.D.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Algodiff/A/index.html b/owl/Owl_optimise/D/Algodiff/A/index.html deleted file mode 100644 index b6efd7ebf..000000000 --- a/owl/Owl_optimise/D/Algodiff/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl.Owl_optimise.D.Algodiff.A)

Module Algodiff.A

val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Algodiff/Arr/index.html b/owl/Owl_optimise/D/Algodiff/Arr/index.html deleted file mode 100644 index 8acacd133..000000000 --- a/owl/Owl_optimise/D/Algodiff/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl.Owl_optimise.D.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Algodiff/Builder/index.html b/owl/Owl_optimise/D/Algodiff/Builder/index.html deleted file mode 100644 index bbf9c2870..000000000 --- a/owl/Owl_optimise/D/Algodiff/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl.Owl_optimise.D.Algodiff.Builder)

Module Algodiff.Builder

module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t
module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t
module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t
module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array
module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t
module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Algodiff/Builder/module-type-Aiso/index.html b/owl/Owl_optimise/D/Algodiff/Builder/module-type-Aiso/index.html deleted file mode 100644 index 23259726e..000000000 --- a/owl/Owl_optimise/D/Algodiff/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl.Owl_optimise.D.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Algodiff/Builder/module-type-Piso/index.html b/owl/Owl_optimise/D/Algodiff/Builder/module-type-Piso/index.html deleted file mode 100644 index cfad91556..000000000 --- a/owl/Owl_optimise/D/Algodiff/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl.Owl_optimise.D.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Algodiff/Builder/module-type-Siao/index.html b/owl/Owl_optimise/D/Algodiff/Builder/module-type-Siao/index.html deleted file mode 100644 index 20468fc53..000000000 --- a/owl/Owl_optimise/D/Algodiff/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl.Owl_optimise.D.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Algodiff/Builder/module-type-Sipo/index.html b/owl/Owl_optimise/D/Algodiff/Builder/module-type-Sipo/index.html deleted file mode 100644 index 25ec2fb2a..000000000 --- a/owl/Owl_optimise/D/Algodiff/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl.Owl_optimise.D.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Algodiff/Builder/module-type-Siso/index.html b/owl/Owl_optimise/D/Algodiff/Builder/module-type-Siso/index.html deleted file mode 100644 index b6aabac29..000000000 --- a/owl/Owl_optimise/D/Algodiff/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl.Owl_optimise.D.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Algodiff/Builder/module-type-Sito/index.html b/owl/Owl_optimise/D/Algodiff/Builder/module-type-Sito/index.html deleted file mode 100644 index 0ad14a797..000000000 --- a/owl/Owl_optimise/D/Algodiff/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl.Owl_optimise.D.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Algodiff/Linalg/index.html b/owl/Owl_optimise/D/Algodiff/Linalg/index.html deleted file mode 100644 index 656649de2..000000000 --- a/owl/Owl_optimise/D/Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_optimise.D.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t
val logdet : t -> t
val chol : ?upper:bool -> t -> t
val qr : t -> t * t
val lq : t -> t * t
val svd : ?thin:bool -> t -> t * t * t
val sylvester : t -> t -> t -> t
val lyapunov : t -> t -> t
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> t -> t -> t
val (/@) : t -> t -> t
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> t -> t -> t
val care : ?diag_r:bool -> t -> t -> t -> t -> t
val dare : ?diag_r:bool -> t -> t -> t -> t -> t
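The Linalg wrappers above operate on Algodiff values, so solves and factorisations remain differentiable. A minimal sketch, assuming this Algodiff module is the one exposed publicly as Owl.Algodiff.D:

open Owl.Algodiff.D

let a = Mat.uniform 3 3                 (* random coefficient matrix *)
let b = Mat.uniform 3 1                 (* right-hand side *)
let x = Linalg.linsolve a b             (* solve a * x = b *)
let x' = Maths.(Linalg.inv a *@ b)      (* same result via an explicit inverse *)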
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Algodiff/Mat/index.html b/owl/Owl_optimise/D/Algodiff/Mat/index.html deleted file mode 100644 index 114a17f66..000000000 --- a/owl/Owl_optimise/D/Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_optimise.D.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Algodiff/Maths/index.html b/owl/Owl_optimise/D/Algodiff/Maths/index.html deleted file mode 100644 index 862f8e05b..000000000 --- a/owl/Owl_optimise/D/Algodiff/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl.Owl_optimise.D.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t
val (-) : t -> t -> t
val (*) : t -> t -> t
val (/) : t -> t -> t
val (*@) : t -> t -> t
val (**) : t -> t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val kron : t -> t -> t
val dot : t -> t -> t
val pow : t -> t -> t
val atan2 : t -> t -> t
val min2 : t -> t -> t
val max2 : t -> t -> t
val cross_entropy : t -> t -> t
val inv : t -> t
val neg : t -> t
val abs : t -> t
val signum : t -> t
val floor : t -> t
val ceil : t -> t
val round : t -> t
val sqr : t -> t
val sqrt : t -> t
val log : t -> t
val log2 : t -> t
val log10 : t -> t
val exp : t -> t
val sin : t -> t
val cos : t -> t
val tan : t -> t
val sinh : t -> t
val cosh : t -> t
val tanh : t -> t
val asin : t -> t
val acos : t -> t
val atan : t -> t
val asinh : t -> t
val acosh : t -> t
val atanh : t -> t
val sum' : t -> t
val log_sum_exp' : t -> t
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t
val sum : ?axis:int -> ?keep_dims:bool -> t -> t
val sum_reduce : ?axis:int array -> t -> t
val mean : t -> t
val transpose : ?axis:int array -> t -> t
val swap : int -> int -> t -> t
val l1norm' : t -> t
val l2norm' : t -> t
val l2norm_sqr' : t -> t
val sigmoid : t -> t
val relu : t -> t
val dawsn : t -> t
val softplus : t -> t
val softsign : t -> t
val softmax : ?axis:int -> t -> t
val reshape : t -> int array -> t
val flatten : t -> t
val get_item : t -> int -> int -> t
val get_row : t -> int -> t
val concat : axis:int -> t -> t -> t
val split : axis:int -> int array -> t -> t array
val of_arrays : t array array -> t
val to_arrays : t -> t array array
val concatenate : axis:int -> t array -> t
val stack : axis:int -> t array -> t
val get_slice : int list list -> t -> t
val set_slice : int list list -> t -> t -> t
val get_fancy : Owl_types.index list -> t -> t
val set_fancy : Owl_types.index list -> t -> t -> t
val diag : ?k:int -> t -> t
val diagm : ?k:int -> t -> t
val trace : t -> t
val triu : ?k:int -> t -> t
val tril : ?k:int -> t -> t
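Maths mirrors the usual scalar and tensor operators on Algodiff values, which is what makes functions built from it differentiable end to end. A small hedged sketch, assuming the enclosing Algodiff module (e.g. Owl.Algodiff.D) is opened:

open Owl.Algodiff.D

(* a smooth scalar function built only from Maths operators *)
let f x = Maths.(sqr x * sin x + _f 1. / (_f 1. + exp (neg x)))

let y = f (pack_flt 0.5)                        (* y : t, still an AD value *)
let () = Printf.printf "f(0.5) = %g\n" (unpack_flt y)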
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Algodiff/NN/index.html b/owl/Owl_optimise/D/Algodiff/NN/index.html deleted file mode 100644 index 022b5b0e2..000000000 --- a/owl/Owl_optimise/D/Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl.Owl_optimise.D.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t
val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val upsampling2d : t -> int array -> t
val pad : ?v:A.elt -> int list list -> t -> t
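These NN primitives take plain Algodiff values rather than network layers. The sketch below assumes Owl's usual NHWC layout (batch, rows, cols, channels for inputs; kernel rows, kernel cols, input channels, output channels for kernels), which is worth verifying against the full Owl manual:

open Owl.Algodiff.D

let x = Arr.uniform [| 1; 28; 28; 1 |]                         (* one 28x28 single-channel image *)
let k = Arr.uniform [| 3; 3; 1; 8 |]                           (* 3x3 kernel, 1 -> 8 channels *)
let y = NN.conv2d ~padding:Owl_types.SAME x k [| 1; 1 |]       (* stride-1 convolution *)
let z = NN.max_pool2d Owl_types.VALID y [| 2; 2 |] [| 2; 2 |]  (* 2x2 pooling, stride 2 *)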
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Algodiff/index.html b/owl/Owl_optimise/D/Algodiff/index.html deleted file mode 100644 index b731af528..000000000 --- a/owl/Owl_optimise/D/Algodiff/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Algodiff (owl.Owl_optimise.D.Algodiff)

Module D.Algodiff

module A : sig ... end
type t = Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D).t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
val tag : unit -> int
val primal : t -> t
val primal' : t -> t
val zero : t -> t
val reset_zero : t -> t
val tangent : t -> t
val adjref : t -> t Stdlib.ref
val adjval : t -> t
val shape : t -> int array
val is_float : t -> bool
val is_arr : t -> bool
val row_num : t -> int
val col_num : t -> int
val numel : t -> int
val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t
val clip_by_l2norm : A.elt -> t -> t
val copy_primal' : t -> t
val tile : t -> int array -> t
val repeat : t -> int array -> t
val pack_elt : A.elt -> t
val unpack_elt : t -> A.elt
val pack_flt : float -> t
val _f : float -> t
val unpack_flt : t -> float
val pack_arr : A.arr -> t
val unpack_arr : t -> A.arr
val deep_info : t -> string
val type_info : t -> string
val error_binop : string -> t -> t -> 'a
val error_uniop : string -> t -> 'a
val make_forward : t -> t -> int -> t
val make_reverse : t -> int -> t
val reverse_prop : t -> t -> unit
val diff : (t -> t) -> t -> t
val diff' : (t -> t) -> t -> t * t
val grad : (t -> t) -> t -> t
val grad' : (t -> t) -> t -> t * t
val jacobian : (t -> t) -> t -> t
val jacobian' : (t -> t) -> t -> t * t
val jacobianv : (t -> t) -> t -> t -> t
val jacobianv' : (t -> t) -> t -> t -> t * t
val jacobianTv : (t -> t) -> t -> t -> t
val jacobianTv' : (t -> t) -> t -> t -> t * t
val hessian : (t -> t) -> t -> t
val hessian' : (t -> t) -> t -> t * t
val hessianv : (t -> t) -> t -> t -> t
val hessianv' : (t -> t) -> t -> t -> t * t
val laplacian : (t -> t) -> t -> t
val laplacian' : (t -> t) -> t -> t * t
val gradhessian : (t -> t) -> t -> t * t
val gradhessian' : (t -> t) -> t -> t * t * t
val gradhessianv : (t -> t) -> t -> t -> t * t
val gradhessianv' : (t -> t) -> t -> t -> t * t * t
module Builder : sig ... end
module Maths : sig ... end
module Linalg : sig ... end
module NN : sig ... end
module Mat : sig ... end
module Arr : sig ... end
val to_trace : t list -> string
val to_dot : t list -> string
val pp_num : Stdlib.Format.formatter -> t -> unit
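The functions above are the main user-facing entry points: diff and grad for first-order derivatives, the jacobian*/hessian* family for higher-order information, and pack_/unpack_ to move between plain floats or arrays and AD values. A minimal hedged sketch, assuming this module is the one exposed as Owl.Algodiff.D:

open Owl.Algodiff.D

(* derivative of a scalar function at a point *)
let f x = Maths.(sin x * cos x)
let dfdx = diff f (pack_flt 1.0)

(* gradient of a scalar-valued function of a matrix argument *)
let g w = Maths.(l2norm_sqr' (w *@ w))
let gw = grad g (Mat.uniform 3 3)

let () = Printf.printf "f'(1.0) = %g\n" (unpack_flt dfdx)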
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Batch/index.html b/owl/Owl_optimise/D/Batch/index.html deleted file mode 100644 index fdb54506f..000000000 --- a/owl/Owl_optimise/D/Batch/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Batch (owl.Owl_optimise.D.Batch)

Module D.Batch

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Batch.typ =
  1. | Full
  2. | Mini of int
  3. | Sample of int
  4. | Stochastic
val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val batches : typ -> Algodiff.t -> int
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Checkpoint/index.html b/owl/Owl_optimise/D/Checkpoint/index.html deleted file mode 100644 index 4d7f9116c..000000000 --- a/owl/Owl_optimise/D/Checkpoint/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Checkpoint (owl.Owl_optimise.D.Checkpoint)

Module D.Checkpoint

type state = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Checkpoint.state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}
type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Checkpoint.typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of state -> unit
  4. | None
val init_state : int -> float -> state
val default_checkpoint_fun : (string -> 'a) -> 'a
val print_state_info : state -> unit
val print_summary : state -> unit
val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Clipping/index.html b/owl/Owl_optimise/D/Clipping/index.html deleted file mode 100644 index 6420184e1..000000000 --- a/owl/Owl_optimise/D/Clipping/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Clipping (owl.Owl_optimise.D.Clipping)

Module D.Clipping

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Clipping.typ =
  1. | L2norm of float
  2. | Value of float * float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Gradient/index.html b/owl/Owl_optimise/D/Gradient/index.html deleted file mode 100644 index 302450603..000000000 --- a/owl/Owl_optimise/D/Gradient/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Gradient (owl.Owl_optimise.D.Gradient)

Module D.Gradient

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Gradient.typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton
val run : typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Learning_Rate/index.html b/owl/Owl_optimise/D/Learning_Rate/index.html deleted file mode 100644 index 985aee32e..000000000 --- a/owl/Owl_optimise/D/Learning_Rate/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Learning_Rate (owl.Owl_optimise.D.Learning_Rate)

Module D.Learning_Rate

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Learning_Rate.typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t
val default : typ -> typ
val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Loss/index.html b/owl/Owl_optimise/D/Loss/index.html deleted file mode 100644 index 3b6571ad9..000000000 --- a/owl/Owl_optimise/D/Loss/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Loss (owl.Owl_optimise.D.Loss)

Module D.Loss

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Loss.typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of Algodiff.t -> Algodiff.t -> Algodiff.t
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Momentum/index.html b/owl/Owl_optimise/D/Momentum/index.html deleted file mode 100644 index dd204cb76..000000000 --- a/owl/Owl_optimise/D/Momentum/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Momentum (owl.Owl_optimise.D.Momentum)

Module D.Momentum

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Momentum.typ =
  1. | Standard of float
  2. | Nesterov of float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Params/index.html b/owl/Owl_optimise/D/Params/index.html deleted file mode 100644 index bc2d7797c..000000000 --- a/owl/Owl_optimise/D/Params/index.html +++ /dev/null @@ -1,16 +0,0 @@ - -Params (owl.Owl_optimise.D.Params)

Module D.Params

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Params.typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}
val default : unit -> typ
val config : ?batch:Batch.typ -> ?gradient:Gradient.typ -> ?loss:Loss.typ -> ?learning_rate:Learning_Rate.typ -> ?regularisation:Regularisation.typ -> ?momentum:Momentum.typ -> ?clipping:Clipping.typ -> ?stopping:Stopping.typ -> ?checkpoint:Checkpoint.typ -> ?verbosity:bool -> float -> typ
val to_string : typ -> string
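Params bundles all the hyperparameters above; config builds a record from keyword arguments, with the trailing float giving the number of epochs (matching the epochs field of the record). A hedged sketch of a typical configuration, using only constructors listed in the surrounding modules:

let params =
  Params.config
    ~batch:(Batch.Mini 128)
    ~loss:Loss.Cross_entropy
    ~learning_rate:(Learning_Rate.Adam (0.001, 0.9, 0.999))
    ~regularisation:(Regularisation.L2norm 1e-4)
    ~momentum:Momentum.None
    ~verbosity:true
    10.

let () = print_endline (Params.to_string params)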
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Regularisation/index.html b/owl/Owl_optimise/D/Regularisation/index.html deleted file mode 100644 index 4b323d014..000000000 --- a/owl/Owl_optimise/D/Regularisation/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Regularisation (owl.Owl_optimise.D.Regularisation)

Module D.Regularisation

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Regularisation.typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None
val run : typ -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Stopping/index.html b/owl/Owl_optimise/D/Stopping/index.html deleted file mode 100644 index b0a68cc5e..000000000 --- a/owl/Owl_optimise/D/Stopping/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Stopping (owl.Owl_optimise.D.Stopping)

Module D.Stopping

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Stopping.typ =
  1. | Const of float
  2. | Early of int * int
  3. | None
val run : typ -> float -> bool
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/D/Utils/index.html b/owl/Owl_optimise/D/Utils/index.html deleted file mode 100644 index 243548866..000000000 --- a/owl/Owl_optimise/D/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl.Owl_optimise.D.Utils)

Module D.Utils

val sample_num : Algodiff.t -> int
val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val get_chunk : Algodiff.t -> Algodiff.t -> int -> int -> Algodiff.t * Algodiff.t
\ No newline at end of file diff --git a/owl/Owl_optimise/D/index.html b/owl/Owl_optimise/D/index.html deleted file mode 100644 index 49e02a2bb..000000000 --- a/owl/Owl_optimise/D/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -D (owl.Owl_optimise.D)

Module Owl_optimise.D

module Algodiff : sig ... end
module Utils : sig ... end
module Learning_Rate : sig ... end
module Batch : sig ... end
module Loss : sig ... end
module Gradient : sig ... end
module Momentum : sig ... end
module Regularisation : sig ... end
module Clipping : sig ... end
module Stopping : sig ... end
module Checkpoint : sig ... end
module Params : sig ... end
val minimise_weight : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> (Algodiff.t array array -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
val minimise_fun : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_compiled_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> (unit -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
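The minimise_* functions drive the optimisation loop: minimise_fun for a plain objective, minimise_weight for an objective parameterised by weights, and the *_network variants for the neural-network front end. A hedged sketch of the simplest case, assuming Owl_optimise.D (the documented path of this page) is directly accessible when linking the owl library:

open Owl_optimise.D

(* minimise f(x) = ||x||^2, whose minimum is at x = 0 *)
let f x = Algodiff.Maths.l2norm_sqr' x
let x0 = Algodiff.Mat.uniform 1 3
let params = Params.config ~learning_rate:(Learning_Rate.Const 0.1) 50.
let _state, x_min = minimise_fun params f x0
let () = Printf.printf "final loss = %g\n" (Algodiff.unpack_flt (f x_min))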
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Algodiff/A/Linalg/index.html b/owl/Owl_optimise/Make_Embedded/Algodiff/A/Linalg/index.html deleted file mode 100644 index e801acb75..000000000 --- a/owl/Owl_optimise/Make_Embedded/Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_optimise.Make_Embedded.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Algodiff/A/Mat/index.html b/owl/Owl_optimise/Make_Embedded/Algodiff/A/Mat/index.html deleted file mode 100644 index 2b631ba02..000000000 --- a/owl/Owl_optimise/Make_Embedded/Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_optimise.Make_Embedded.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Algodiff/A/Scalar/index.html b/owl/Owl_optimise/Make_Embedded/Algodiff/A/Scalar/index.html deleted file mode 100644 index 91f7c79fa..000000000 --- a/owl/Owl_optimise/Make_Embedded/Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl.Owl_optimise.Make_Embedded.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Algodiff/A/index.html b/owl/Owl_optimise/Make_Embedded/Algodiff/A/index.html deleted file mode 100644 index 512e9c2be..000000000 --- a/owl/Owl_optimise/Make_Embedded/Algodiff/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl.Owl_optimise.Make_Embedded.Algodiff.A)

Module Algodiff.A

val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Algodiff/Arr/index.html b/owl/Owl_optimise/Make_Embedded/Algodiff/Arr/index.html deleted file mode 100644 index 6c637f23e..000000000 --- a/owl/Owl_optimise/Make_Embedded/Algodiff/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl.Owl_optimise.Make_Embedded.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/index.html b/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/index.html deleted file mode 100644 index 4569125f2..000000000 --- a/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl.Owl_optimise.Make_Embedded.Algodiff.Builder)

Module Algodiff.Builder

module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t
module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t
module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t
module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array
module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t
module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Aiso/index.html b/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Aiso/index.html deleted file mode 100644 index f9c5d80db..000000000 --- a/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl.Owl_optimise.Make_Embedded.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Piso/index.html b/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Piso/index.html deleted file mode 100644 index 83753799b..000000000 --- a/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl.Owl_optimise.Make_Embedded.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Siao/index.html b/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Siao/index.html deleted file mode 100644 index 1294ab9d5..000000000 --- a/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl.Owl_optimise.Make_Embedded.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Sipo/index.html b/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Sipo/index.html deleted file mode 100644 index b855fa3f7..000000000 --- a/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl.Owl_optimise.Make_Embedded.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Siso/index.html b/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Siso/index.html deleted file mode 100644 index b107d708d..000000000 --- a/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl.Owl_optimise.Make_Embedded.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Sito/index.html b/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Sito/index.html deleted file mode 100644 index a75a627c4..000000000 --- a/owl/Owl_optimise/Make_Embedded/Algodiff/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl.Owl_optimise.Make_Embedded.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Algodiff/Linalg/index.html b/owl/Owl_optimise/Make_Embedded/Algodiff/Linalg/index.html deleted file mode 100644 index 06bb11a13..000000000 --- a/owl/Owl_optimise/Make_Embedded/Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_optimise.Make_Embedded.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t
val logdet : t -> t
val chol : ?upper:bool -> t -> t
val qr : t -> t * t
val lq : t -> t * t
val svd : ?thin:bool -> t -> t * t * t
val sylvester : t -> t -> t -> t
val lyapunov : t -> t -> t
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> t -> t -> t
val (/@) : t -> t -> t
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> t -> t -> t
val care : ?diag_r:bool -> t -> t -> t -> t -> t
val dare : ?diag_r:bool -> t -> t -> t -> t -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Algodiff/Mat/index.html b/owl/Owl_optimise/Make_Embedded/Algodiff/Mat/index.html deleted file mode 100644 index cea721c5c..000000000 --- a/owl/Owl_optimise/Make_Embedded/Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_optimise.Make_Embedded.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Algodiff/Maths/index.html b/owl/Owl_optimise/Make_Embedded/Algodiff/Maths/index.html deleted file mode 100644 index 9c792fbab..000000000 --- a/owl/Owl_optimise/Make_Embedded/Algodiff/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl.Owl_optimise.Make_Embedded.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t
val (-) : t -> t -> t
val (*) : t -> t -> t
val (/) : t -> t -> t
val (*@) : t -> t -> t
val (**) : t -> t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val kron : t -> t -> t
val dot : t -> t -> t
val pow : t -> t -> t
val atan2 : t -> t -> t
val min2 : t -> t -> t
val max2 : t -> t -> t
val cross_entropy : t -> t -> t
val inv : t -> t
val neg : t -> t
val abs : t -> t
val signum : t -> t
val floor : t -> t
val ceil : t -> t
val round : t -> t
val sqr : t -> t
val sqrt : t -> t
val log : t -> t
val log2 : t -> t
val log10 : t -> t
val exp : t -> t
val sin : t -> t
val cos : t -> t
val tan : t -> t
val sinh : t -> t
val cosh : t -> t
val tanh : t -> t
val asin : t -> t
val acos : t -> t
val atan : t -> t
val asinh : t -> t
val acosh : t -> t
val atanh : t -> t
val sum' : t -> t
val log_sum_exp' : t -> t
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t
val sum : ?axis:int -> ?keep_dims:bool -> t -> t
val sum_reduce : ?axis:int array -> t -> t
val mean : t -> t
val transpose : ?axis:int array -> t -> t
val swap : int -> int -> t -> t
val l1norm' : t -> t
val l2norm' : t -> t
val l2norm_sqr' : t -> t
val sigmoid : t -> t
val relu : t -> t
val dawsn : t -> t
val softplus : t -> t
val softsign : t -> t
val softmax : ?axis:int -> t -> t
val reshape : t -> int array -> t
val flatten : t -> t
val get_item : t -> int -> int -> t
val get_row : t -> int -> t
val concat : axis:int -> t -> t -> t
val split : axis:int -> int array -> t -> t array
val of_arrays : t array array -> t
val to_arrays : t -> t array array
val concatenate : axis:int -> t array -> t
val stack : axis:int -> t array -> t
val get_slice : int list list -> t -> t
val set_slice : int list list -> t -> t -> t
val get_fancy : Owl_types.index list -> t -> t
val set_fancy : Owl_types.index list -> t -> t -> t
val diag : ?k:int -> t -> t
val diagm : ?k:int -> t -> t
val trace : t -> t
val triu : ?k:int -> t -> t
val tril : ?k:int -> t -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Algodiff/NN/index.html b/owl/Owl_optimise/Make_Embedded/Algodiff/NN/index.html deleted file mode 100644 index 3b3e30971..000000000 --- a/owl/Owl_optimise/Make_Embedded/Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl.Owl_optimise.Make_Embedded.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t
val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val upsampling2d : t -> int array -> t
val pad : ?v:A.elt -> int list list -> t -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Algodiff/index.html b/owl/Owl_optimise/Make_Embedded/Algodiff/index.html deleted file mode 100644 index 290cf8b7a..000000000 --- a/owl/Owl_optimise/Make_Embedded/Algodiff/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Algodiff (owl.Owl_optimise.Make_Embedded.Algodiff)

Module Make_Embedded.Algodiff

module A : sig ... end
type t = Owl_algodiff_generic.Make(A).t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
val tag : unit -> int
val primal : t -> t
val primal' : t -> t
val zero : t -> t
val reset_zero : t -> t
val tangent : t -> t
val adjref : t -> t Stdlib.ref
val adjval : t -> t
val shape : t -> int array
val is_float : t -> bool
val is_arr : t -> bool
val row_num : t -> int
val col_num : t -> int
val numel : t -> int
val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t
val clip_by_l2norm : A.elt -> t -> t
val copy_primal' : t -> t
val tile : t -> int array -> t
val repeat : t -> int array -> t
val pack_elt : A.elt -> t
val unpack_elt : t -> A.elt
val pack_flt : float -> t
val _f : float -> t
val unpack_flt : t -> float
val pack_arr : A.arr -> t
val unpack_arr : t -> A.arr
val deep_info : t -> string
val type_info : t -> string
val error_binop : string -> t -> t -> 'a
val error_uniop : string -> t -> 'a
val make_forward : t -> t -> int -> t
val make_reverse : t -> int -> t
val reverse_prop : t -> t -> unit
val diff : (t -> t) -> t -> t
val diff' : (t -> t) -> t -> t * t
val grad : (t -> t) -> t -> t
val grad' : (t -> t) -> t -> t * t
val jacobian : (t -> t) -> t -> t
val jacobian' : (t -> t) -> t -> t * t
val jacobianv : (t -> t) -> t -> t -> t
val jacobianv' : (t -> t) -> t -> t -> t * t
val jacobianTv : (t -> t) -> t -> t -> t
val jacobianTv' : (t -> t) -> t -> t -> t * t
val hessian : (t -> t) -> t -> t
val hessian' : (t -> t) -> t -> t * t
val hessianv : (t -> t) -> t -> t -> t
val hessianv' : (t -> t) -> t -> t -> t * t
val laplacian : (t -> t) -> t -> t
val laplacian' : (t -> t) -> t -> t * t
val gradhessian : (t -> t) -> t -> t * t
val gradhessian' : (t -> t) -> t -> t * t * t
val gradhessianv : (t -> t) -> t -> t -> t * t
val gradhessianv' : (t -> t) -> t -> t -> t * t * t
module Builder : sig ... end
module Maths : sig ... end
module Linalg : sig ... end
module NN : sig ... end
module Mat : sig ... end
module Arr : sig ... end
val to_trace : t list -> string
val to_dot : t list -> string
val pp_num : Stdlib.Format.formatter -> t -> unit
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Batch/index.html b/owl/Owl_optimise/Make_Embedded/Batch/index.html deleted file mode 100644 index 43c046e09..000000000 --- a/owl/Owl_optimise/Make_Embedded/Batch/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Batch (owl.Owl_optimise.Make_Embedded.Batch)

Module Make_Embedded.Batch

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Batch.typ =
  1. | Full
  2. | Mini of int
  3. | Sample of int
  4. | Stochastic
val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val batches : typ -> Algodiff.t -> int
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Checkpoint/index.html b/owl/Owl_optimise/Make_Embedded/Checkpoint/index.html deleted file mode 100644 index 85d9d9e13..000000000 --- a/owl/Owl_optimise/Make_Embedded/Checkpoint/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Checkpoint (owl.Owl_optimise.Make_Embedded.Checkpoint)

Module Make_Embedded.Checkpoint

type state = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Checkpoint.state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}
type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Checkpoint.typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of state -> unit
  4. | None
val init_state : int -> float -> state
val default_checkpoint_fun : (string -> 'a) -> 'a
val print_state_info : state -> unit
val print_summary : state -> unit
val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Clipping/index.html b/owl/Owl_optimise/Make_Embedded/Clipping/index.html deleted file mode 100644 index 3da754f63..000000000 --- a/owl/Owl_optimise/Make_Embedded/Clipping/index.html +++ /dev/null @@ -1,3 +0,0 @@ - -Clipping (owl.Owl_optimise.Make_Embedded.Clipping)

Module Make_Embedded.Clipping

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Clipping.typ =
  1. | L2norm of float
  2. | Value of float * float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Gradient/index.html b/owl/Owl_optimise/Make_Embedded/Gradient/index.html deleted file mode 100644 index 382f97db1..000000000 --- a/owl/Owl_optimise/Make_Embedded/Gradient/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -Gradient (owl.Owl_optimise.Make_Embedded.Gradient)

Module Make_Embedded.Gradient

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Gradient.typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton
val run : typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Learning_Rate/index.html b/owl/Owl_optimise/Make_Embedded/Learning_Rate/index.html deleted file mode 100644 index be7e5aafe..000000000 --- a/owl/Owl_optimise/Make_Embedded/Learning_Rate/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Learning_Rate (owl.Owl_optimise.Make_Embedded.Learning_Rate)

Module Make_Embedded.Learning_Rate

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Learning_Rate.typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t
val default : typ -> typ
val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Loss/index.html b/owl/Owl_optimise/Make_Embedded/Loss/index.html deleted file mode 100644 index d2398ecc0..000000000 --- a/owl/Owl_optimise/Make_Embedded/Loss/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Loss (owl.Owl_optimise.Make_Embedded.Loss)

Module Make_Embedded.Loss

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Loss.typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of Algodiff.t -> Algodiff.t -> Algodiff.t
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Momentum/index.html b/owl/Owl_optimise/Make_Embedded/Momentum/index.html deleted file mode 100644 index 826b27b30..000000000 --- a/owl/Owl_optimise/Make_Embedded/Momentum/index.html +++ /dev/null @@ -1,3 +0,0 @@ - -Momentum (owl.Owl_optimise.Make_Embedded.Momentum)

Module Make_Embedded.Momentum

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Momentum.typ =
  1. | Standard of float
  2. | Nesterov of float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Params/index.html b/owl/Owl_optimise/Make_Embedded/Params/index.html deleted file mode 100644 index bf3be0306..000000000 --- a/owl/Owl_optimise/Make_Embedded/Params/index.html +++ /dev/null @@ -1,14 +0,0 @@ - -Params (owl.Owl_optimise.Make_Embedded.Params)

Module Make_Embedded.Params

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Params.typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}
val default : unit -> typ
val config : ?batch:Batch.typ -> ?gradient:Gradient.typ -> ?loss:Loss.typ -> ?learning_rate:Learning_Rate.typ -> ?regularisation:Regularisation.typ -> ?momentum:Momentum.typ -> ?clipping:Clipping.typ -> ?stopping:Stopping.typ -> ?checkpoint:Checkpoint.typ -> ?verbosity:bool -> float -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Regularisation/index.html b/owl/Owl_optimise/Make_Embedded/Regularisation/index.html deleted file mode 100644 index a33d32888..000000000 --- a/owl/Owl_optimise/Make_Embedded/Regularisation/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Regularisation (owl.Owl_optimise.Make_Embedded.Regularisation)

Module Make_Embedded.Regularisation

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Regularisation.typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None
val run : typ -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Stopping/index.html b/owl/Owl_optimise/Make_Embedded/Stopping/index.html deleted file mode 100644 index c110bdbe3..000000000 --- a/owl/Owl_optimise/Make_Embedded/Stopping/index.html +++ /dev/null @@ -1,3 +0,0 @@ - -Stopping (owl.Owl_optimise.Make_Embedded.Stopping)

Module Make_Embedded.Stopping

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Stopping.typ =
  1. | Const of float
  2. | Early of int * int
  3. | None
val run : typ -> float -> bool
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/Utils/index.html b/owl/Owl_optimise/Make_Embedded/Utils/index.html deleted file mode 100644 index 20ae98b45..000000000 --- a/owl/Owl_optimise/Make_Embedded/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl.Owl_optimise.Make_Embedded.Utils)

Module Make_Embedded.Utils

val sample_num : Algodiff.t -> int
val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val get_chunk : Algodiff.t -> Algodiff.t -> int -> int -> Algodiff.t * Algodiff.t
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/argument-1-A/Linalg/index.html b/owl/Owl_optimise/Make_Embedded/argument-1-A/Linalg/index.html deleted file mode 100644 index 6b2b74ec6..000000000 --- a/owl/Owl_optimise/Make_Embedded/argument-1-A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_optimise.Make_Embedded.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/argument-1-A/Mat/index.html b/owl/Owl_optimise/Make_Embedded/argument-1-A/Mat/index.html deleted file mode 100644 index 52b473848..000000000 --- a/owl/Owl_optimise/Make_Embedded/argument-1-A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_optimise.Make_Embedded.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/argument-1-A/Scalar/index.html b/owl/Owl_optimise/Make_Embedded/argument-1-A/Scalar/index.html deleted file mode 100644 index e41642ddd..000000000 --- a/owl/Owl_optimise/Make_Embedded/argument-1-A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl.Owl_optimise.Make_Embedded.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/argument-1-A/index.html b/owl/Owl_optimise/Make_Embedded/argument-1-A/index.html deleted file mode 100644 index 29b0f0f9a..000000000 --- a/owl/Owl_optimise/Make_Embedded/argument-1-A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl.Owl_optimise.Make_Embedded.A)

Parameter Make_Embedded.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
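The parameter A can be any ndarray module implementing this signature; in the released library the stock choices are the wrappers around Owl.Dense.Ndarray.S and Owl.Dense.Ndarray.D. A minimal sketch of a few of the listed operations, assuming the double-precision module:

module N = Owl.Dense.Ndarray.D

let x = N.sequential [| 2; 3 |]             (* 0 1 2 / 3 4 5 *)
let y = N.map (fun e -> e *. e) x           (* element-wise square *)
let s = N.sum' y                            (* 55. *)
let r = N.get_slice [ [ 0 ]; [] ] x         (* first row, as a 1x3 ndarray *)
let () = N.print r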
\ No newline at end of file diff --git a/owl/Owl_optimise/Make_Embedded/index.html b/owl/Owl_optimise/Make_Embedded/index.html deleted file mode 100644 index b0ff53b5f..000000000 --- a/owl/Owl_optimise/Make_Embedded/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -Make_Embedded (owl.Owl_optimise.Make_Embedded)

Module Owl_optimise.Make_Embedded

Parameters

Signature

include sig ... end
module Algodiff : sig ... end
module Utils : sig ... end
module Learning_Rate : sig ... end
module Batch : sig ... end
module Loss : sig ... end
module Gradient : sig ... end
module Momentum : sig ... end
module Regularisation : sig ... end
module Clipping : sig ... end
module Stopping : sig ... end
module Checkpoint : sig ... end
module Params : sig ... end
val minimise_weight : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> (Algodiff.t array array -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
val minimise_fun : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_compiled_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> (unit -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Algodiff/A/Linalg/index.html b/owl/Owl_optimise/S/Algodiff/A/Linalg/index.html deleted file mode 100644 index 7c2cba6f8..000000000 --- a/owl/Owl_optimise/S/Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_optimise.S.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Algodiff/A/Mat/index.html b/owl/Owl_optimise/S/Algodiff/A/Mat/index.html deleted file mode 100644 index be8a81029..000000000 --- a/owl/Owl_optimise/S/Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_optimise.S.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Algodiff/A/Scalar/index.html b/owl/Owl_optimise/S/Algodiff/A/Scalar/index.html deleted file mode 100644 index 565d9d113..000000000 --- a/owl/Owl_optimise/S/Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl.Owl_optimise.S.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Algodiff/A/index.html b/owl/Owl_optimise/S/Algodiff/A/index.html deleted file mode 100644 index c1c38f57c..000000000 --- a/owl/Owl_optimise/S/Algodiff/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl.Owl_optimise.S.Algodiff.A)

Module Algodiff.A

val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Algodiff/Arr/index.html b/owl/Owl_optimise/S/Algodiff/Arr/index.html deleted file mode 100644 index 9a37577bc..000000000 --- a/owl/Owl_optimise/S/Algodiff/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl.Owl_optimise.S.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Algodiff/Builder/index.html b/owl/Owl_optimise/S/Algodiff/Builder/index.html deleted file mode 100644 index 5d1daa0e3..000000000 --- a/owl/Owl_optimise/S/Algodiff/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl.Owl_optimise.S.Algodiff.Builder)

Module Algodiff.Builder

module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t
module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t
module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t
module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array
module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t
module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t
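The Builder module types are the hook for registering new differentiable primitives. Below is only a sketch of a single-input single-output op (an element-wise cube), written against Owl.Algodiff.S; the 3x^2 derivative rules are supplied by hand and are not taken from this page.

module AD = Owl.Algodiff.S

let cube =
  AD.Builder.build_siso
    (module struct
      let label = "cube"
      let ff_f a = AD.pack_flt (a *. a *. a)                         (* scalar primal *)
      let ff_arr a = AD.pack_arr AD.A.(mul a (mul a a))              (* ndarray primal *)
      let df _cp ap at = AD.Maths.(at * AD.pack_flt 3. * ap * ap)    (* forward rule *)
      let dr a _cp ca = AD.Maths.(!ca * AD.pack_flt 3. * a * a)      (* reverse rule *)
    end : AD.Builder.Siso)

let y = cube (AD.pack_flt 2.) |> AD.unpack_flt                       (* 8. *)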
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Algodiff/Builder/module-type-Aiso/index.html b/owl/Owl_optimise/S/Algodiff/Builder/module-type-Aiso/index.html deleted file mode 100644 index bb9726f85..000000000 --- a/owl/Owl_optimise/S/Algodiff/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl.Owl_optimise.S.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Algodiff/Builder/module-type-Piso/index.html b/owl/Owl_optimise/S/Algodiff/Builder/module-type-Piso/index.html deleted file mode 100644 index 5a59e4f05..000000000 --- a/owl/Owl_optimise/S/Algodiff/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl.Owl_optimise.S.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Algodiff/Builder/module-type-Siao/index.html b/owl/Owl_optimise/S/Algodiff/Builder/module-type-Siao/index.html deleted file mode 100644 index aa99d9c40..000000000 --- a/owl/Owl_optimise/S/Algodiff/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl.Owl_optimise.S.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Algodiff/Builder/module-type-Sipo/index.html b/owl/Owl_optimise/S/Algodiff/Builder/module-type-Sipo/index.html deleted file mode 100644 index 010e325a9..000000000 --- a/owl/Owl_optimise/S/Algodiff/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl.Owl_optimise.S.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Algodiff/Builder/module-type-Siso/index.html b/owl/Owl_optimise/S/Algodiff/Builder/module-type-Siso/index.html deleted file mode 100644 index ce3ed3370..000000000 --- a/owl/Owl_optimise/S/Algodiff/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl.Owl_optimise.S.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Algodiff/Builder/module-type-Sito/index.html b/owl/Owl_optimise/S/Algodiff/Builder/module-type-Sito/index.html deleted file mode 100644 index d5b7f10a4..000000000 --- a/owl/Owl_optimise/S/Algodiff/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl.Owl_optimise.S.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Algodiff/Linalg/index.html b/owl/Owl_optimise/S/Algodiff/Linalg/index.html deleted file mode 100644 index c66904f9a..000000000 --- a/owl/Owl_optimise/S/Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_optimise.S.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t
val logdet : t -> t
val chol : ?upper:bool -> t -> t
val qr : t -> t * t
val lq : t -> t * t
val svd : ?thin:bool -> t -> t * t * t
val sylvester : t -> t -> t -> t
val lyapunov : t -> t -> t
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> t -> t -> t
val (/@) : t -> t -> t
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> t -> t -> t
val care : ?diag_r:bool -> t -> t -> t -> t -> t
val dare : ?diag_r:bool -> t -> t -> t -> t -> t
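These operate on AD values (t), so a decomposition or solve stays inside the differentiable graph. A short sketch, assuming the single-precision instantiation Owl.Algodiff.S:

module AD = Owl.Algodiff.S

let a = AD.Mat.gaussian 4 4
let b = AD.Mat.gaussian 4 1
let x = AD.Linalg.linsolve a b                         (* solve a * x = b *)

(* gradients can flow through the solve as well *)
let g = AD.grad (fun b -> AD.Maths.sum' (AD.Linalg.linsolve a b)) b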
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Algodiff/Mat/index.html b/owl/Owl_optimise/S/Algodiff/Mat/index.html deleted file mode 100644 index 5808310c1..000000000 --- a/owl/Owl_optimise/S/Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_optimise.S.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Algodiff/Maths/index.html b/owl/Owl_optimise/S/Algodiff/Maths/index.html deleted file mode 100644 index b70f57564..000000000 --- a/owl/Owl_optimise/S/Algodiff/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl.Owl_optimise.S.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t
val (-) : t -> t -> t
val (*) : t -> t -> t
val (/) : t -> t -> t
val (*@) : t -> t -> t
val (**) : t -> t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val kron : t -> t -> t
val dot : t -> t -> t
val pow : t -> t -> t
val atan2 : t -> t -> t
val min2 : t -> t -> t
val max2 : t -> t -> t
val cross_entropy : t -> t -> t
val inv : t -> t
val neg : t -> t
val abs : t -> t
val signum : t -> t
val floor : t -> t
val ceil : t -> t
val round : t -> t
val sqr : t -> t
val sqrt : t -> t
val log : t -> t
val log2 : t -> t
val log10 : t -> t
val exp : t -> t
val sin : t -> t
val cos : t -> t
val tan : t -> t
val sinh : t -> t
val cosh : t -> t
val tanh : t -> t
val asin : t -> t
val acos : t -> t
val atan : t -> t
val asinh : t -> t
val acosh : t -> t
val atanh : t -> t
val sum' : t -> t
val log_sum_exp' : t -> t
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t
val sum : ?axis:int -> ?keep_dims:bool -> t -> t
val sum_reduce : ?axis:int array -> t -> t
val mean : t -> t
val transpose : ?axis:int array -> t -> t
val swap : int -> int -> t -> t
val l1norm' : t -> t
val l2norm' : t -> t
val l2norm_sqr' : t -> t
val sigmoid : t -> t
val relu : t -> t
val dawsn : t -> t
val softplus : t -> t
val softsign : t -> t
val softmax : ?axis:int -> t -> t
val reshape : t -> int array -> t
val flatten : t -> t
val get_item : t -> int -> int -> t
val get_row : t -> int -> t
val concat : axis:int -> t -> t -> t
val split : axis:int -> int array -> t -> t array
val of_arrays : t array array -> t
val to_arrays : t -> t array array
val concatenate : axis:int -> t array -> t
val stack : axis:int -> t array -> t
val get_slice : int list list -> t -> t
val set_slice : int list list -> t -> t -> t
val get_fancy : Owl_types.index list -> t -> t
val set_fancy : Owl_types.index list -> t -> t -> t
val diag : ?k:int -> t -> t
val diagm : ?k:int -> t -> t
val trace : t -> t
val triu : ?k:int -> t -> t
val tril : ?k:int -> t -> t
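Maths mirrors the usual operators on AD values, which is what lets a function written with it be differentiated end to end. A small sketch, assuming Owl.Algodiff.S:

module AD = Owl.Algodiff.S
open AD

let f x = Maths.(sin x * exp x + sqr x)    (* R -> R, built from Maths only *)
let f'  = diff f                           (* its derivative, again a t -> t function *)
let y   = f' (pack_flt 0.5) |> unpack_flt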
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Algodiff/NN/index.html b/owl/Owl_optimise/S/Algodiff/NN/index.html deleted file mode 100644 index 178bc51c7..000000000 --- a/owl/Owl_optimise/S/Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl.Owl_optimise.S.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t
val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val upsampling2d : t -> int array -> t
val pad : ?v:A.elt -> int list list -> t -> t
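A sketch of the convolution and pooling helpers on AD values. Owl.Algodiff.S stands in for the instantiated module, and the NHWC input layout and SAME/VALID padding constructors are assumptions based on Owl's usual conventions rather than this page.

module AD = Owl.Algodiff.S

let x = AD.pack_arr (AD.A.uniform [| 1; 28; 28; 1 |])     (* one 28x28 single-channel image *)
let w = AD.pack_arr (AD.A.gaussian [| 3; 3; 1; 8 |])      (* 3x3 kernel, 8 filters *)

let y = AD.NN.conv2d ~padding:Owl_types.SAME x w [| 1; 1 |]      (* stride 1x1 *)
let z = AD.NN.max_pool2d Owl_types.SAME y [| 2; 2 |] [| 2; 2 |]  (* 2x2 pool, stride 2 *)
let d = AD.NN.dropout ~rate:0.1 z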
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Algodiff/index.html b/owl/Owl_optimise/S/Algodiff/index.html deleted file mode 100644 index 5c91c0f20..000000000 --- a/owl/Owl_optimise/S/Algodiff/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Algodiff (owl.Owl_optimise.S.Algodiff)

Module S.Algodiff

module A : sig ... end
type t = Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S).t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
val tag : unit -> int
val primal : t -> t
val primal' : t -> t
val zero : t -> t
val reset_zero : t -> t
val tangent : t -> t
val adjref : t -> t Stdlib.ref
val adjval : t -> t
val shape : t -> int array
val is_float : t -> bool
val is_arr : t -> bool
val row_num : t -> int
val col_num : t -> int
val numel : t -> int
val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t
val clip_by_l2norm : A.elt -> t -> t
val copy_primal' : t -> t
val tile : t -> int array -> t
val repeat : t -> int array -> t
val pack_elt : A.elt -> t
val unpack_elt : t -> A.elt
val pack_flt : float -> t
val _f : float -> t
val unpack_flt : t -> float
val pack_arr : A.arr -> t
val unpack_arr : t -> A.arr
val deep_info : t -> string
val type_info : t -> string
val error_binop : string -> t -> t -> 'a
val error_uniop : string -> t -> 'a
val make_forward : t -> t -> int -> t
val make_reverse : t -> int -> t
val reverse_prop : t -> t -> unit
val diff : (t -> t) -> t -> t
val diff' : (t -> t) -> t -> t * t
val grad : (t -> t) -> t -> t
val grad' : (t -> t) -> t -> t * t
val jacobian : (t -> t) -> t -> t
val jacobian' : (t -> t) -> t -> t * t
val jacobianv : (t -> t) -> t -> t -> t
val jacobianv' : (t -> t) -> t -> t -> t * t
val jacobianTv : (t -> t) -> t -> t -> t
val jacobianTv' : (t -> t) -> t -> t -> t * t
val hessian : (t -> t) -> t -> t
val hessian' : (t -> t) -> t -> t * t
val hessianv : (t -> t) -> t -> t -> t
val hessianv' : (t -> t) -> t -> t -> t * t
val laplacian : (t -> t) -> t -> t
val laplacian' : (t -> t) -> t -> t * t
val gradhessian : (t -> t) -> t -> t * t
val gradhessian' : (t -> t) -> t -> t * t * t
val gradhessianv : (t -> t) -> t -> t -> t * t
val gradhessianv' : (t -> t) -> t -> t -> t * t * t
module Builder : sig ... end
module Maths : sig ... end
module Linalg : sig ... end
module NN : sig ... end
module Mat : sig ... end
module Arr : sig ... end
val to_trace : t list -> string
val to_dot : t list -> string
val pp_num : Stdlib.Format.formatter -> t -> unit
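The higher-order operators above are the main entry points: diff and grad for derivatives, jacobian/hessian and their *v variants for the corresponding matrices and products. A short sketch, assuming Owl.Algodiff.S:

module AD = Owl.Algodiff.S
open AD

let g x = Maths.(sum' (x * x))                (* R^n -> R *)
let x0  = pack_arr (A.uniform [| 1; 3 |])

let gx = grad g x0                            (* gradient, equals 2 * x0 *)
let hx = hessian g x0                         (* Hessian, equals 2 * identity *)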
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Batch/index.html b/owl/Owl_optimise/S/Batch/index.html deleted file mode 100644 index cdfb7c279..000000000 --- a/owl/Owl_optimise/S/Batch/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Batch (owl.Owl_optimise.S.Batch)

Module S.Batch

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Batch.typ =
  1. | Full
  2. | Mini of int
  3. | Sample of int
  4. | Stochastic
val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val batches : typ -> Algodiff.t -> int
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Checkpoint/index.html b/owl/Owl_optimise/S/Checkpoint/index.html deleted file mode 100644 index e64359ff1..000000000 --- a/owl/Owl_optimise/S/Checkpoint/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Checkpoint (owl.Owl_optimise.S.Checkpoint)

Module S.Checkpoint

type state = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Checkpoint.state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}
type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Checkpoint.typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of (state -> unit)
  4. | None
val init_state : int -> float -> state
val default_checkpoint_fun : (string -> 'a) -> 'a
val print_state_info : state -> unit
val print_summary : state -> unit
val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Clipping/index.html b/owl/Owl_optimise/S/Clipping/index.html deleted file mode 100644 index 2a8533ba2..000000000 --- a/owl/Owl_optimise/S/Clipping/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Clipping (owl.Owl_optimise.S.Clipping)

Module S.Clipping

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Clipping.typ =
  1. | L2norm of float
  2. | Value of float * float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Gradient/index.html b/owl/Owl_optimise/S/Gradient/index.html deleted file mode 100644 index 7670ad8ab..000000000 --- a/owl/Owl_optimise/S/Gradient/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Gradient (owl.Owl_optimise.S.Gradient)

Module S.Gradient

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Gradient.typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton
val run : typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Learning_Rate/index.html b/owl/Owl_optimise/S/Learning_Rate/index.html deleted file mode 100644 index 825b5e1a2..000000000 --- a/owl/Owl_optimise/S/Learning_Rate/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Learning_Rate (owl.Owl_optimise.S.Learning_Rate)

Module S.Learning_Rate

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Learning_Rate.typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t
val default : typ -> typ
val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Loss/index.html b/owl/Owl_optimise/S/Loss/index.html deleted file mode 100644 index 407c6c761..000000000 --- a/owl/Owl_optimise/S/Loss/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Loss (owl.Owl_optimise.S.Loss)

Module S.Loss

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Loss.typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of (Algodiff.t -> Algodiff.t -> Algodiff.t)
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Momentum/index.html b/owl/Owl_optimise/S/Momentum/index.html deleted file mode 100644 index c8578b30b..000000000 --- a/owl/Owl_optimise/S/Momentum/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Momentum (owl.Owl_optimise.S.Momentum)

Module S.Momentum

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Momentum.typ =
  1. | Standard of float
  2. | Nesterov of float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Params/index.html b/owl/Owl_optimise/S/Params/index.html deleted file mode 100644 index 473067cbf..000000000 --- a/owl/Owl_optimise/S/Params/index.html +++ /dev/null @@ -1,16 +0,0 @@ - -Params (owl.Owl_optimise.S.Params)

Module S.Params

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Params.typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}
val default : unit -> typ
val config : ?batch:Batch.typ -> ?gradient:Gradient.typ -> ?loss:Loss.typ -> ?learning_rate:Learning_Rate.typ -> ?regularisation:Regularisation.typ -> ?momentum:Momentum.typ -> ?clipping:Clipping.typ -> ?stopping:Stopping.typ -> ?checkpoint:Checkpoint.typ -> ?verbosity:bool -> float -> typ
val to_string : typ -> string
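Params.config bundles the per-module settings above into one record, and default () gives a ready-made configuration. The hyper-parameter values below are purely illustrative.

module O = Owl_optimise.S

let params =
  O.Params.config
    ~batch:(O.Batch.Mini 64)
    ~gradient:O.Gradient.GD
    ~loss:O.Loss.Quadratic
    ~learning_rate:(O.Learning_Rate.Adagrad 0.01)
    ~momentum:(O.Momentum.Standard 0.9)
    ~stopping:(O.Stopping.Const 1e-6)
    ~verbosity:true
    50.                                       (* epochs *)

let () = print_endline (O.Params.to_string params)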
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Regularisation/index.html b/owl/Owl_optimise/S/Regularisation/index.html deleted file mode 100644 index 620f38d16..000000000 --- a/owl/Owl_optimise/S/Regularisation/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Regularisation (owl.Owl_optimise.S.Regularisation)

Module S.Regularisation

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Regularisation.typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None
val run : typ -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Stopping/index.html b/owl/Owl_optimise/S/Stopping/index.html deleted file mode 100644 index ef760b970..000000000 --- a/owl/Owl_optimise/S/Stopping/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Stopping (owl.Owl_optimise.S.Stopping)

Module S.Stopping

val run : typ -> float -> bool
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_optimise/S/Utils/index.html b/owl/Owl_optimise/S/Utils/index.html deleted file mode 100644 index da0313c75..000000000 --- a/owl/Owl_optimise/S/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl.Owl_optimise.S.Utils)

Module S.Utils

val sample_num : Algodiff.t -> int
val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val get_chunk : Algodiff.t -> Algodiff.t -> int -> int -> Algodiff.t * Algodiff.t
\ No newline at end of file diff --git a/owl/Owl_optimise/S/index.html b/owl/Owl_optimise/S/index.html deleted file mode 100644 index ef8a793dc..000000000 --- a/owl/Owl_optimise/S/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -S (owl.Owl_optimise.S)

Module Owl_optimise.S

module Algodiff : sig ... end
module Utils : sig ... end
module Learning_Rate : sig ... end
module Batch : sig ... end
module Loss : sig ... end
module Gradient : sig ... end
module Momentum : sig ... end
module Regularisation : sig ... end
module Clipping : sig ... end
module Stopping : sig ... end
module Checkpoint : sig ... end
module Params : sig ... end
val minimise_weight : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> (Algodiff.t array array -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
val minimise_fun : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_compiled_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> (unit -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
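To tie the pieces together, a sketch of minimise_fun on a simple quadratic; everything comes from the signatures above except the choice of objective and starting point, which are illustrative.

module O  = Owl_optimise.S
module AD = O.Algodiff

(* f(x) = || x - 1 ||^2, minimised over x *)
let f x = AD.Maths.(l2norm_sqr' (x - AD.pack_flt 1.))

let () =
  let x0 = AD.pack_arr (AD.A.uniform [| 5; 1 |]) in
  let params = O.Params.default () in
  let _state, x_star = O.minimise_fun params f x0 in
  AD.A.print (AD.unpack_arr x_star)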
\ No newline at end of file diff --git a/owl/Owl_regression/.dummy b/owl/Owl_regression/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_regression/D/Optimise/Algodiff/A/Linalg/index.html b/owl/Owl_regression/D/Optimise/Algodiff/A/Linalg/index.html deleted file mode 100644 index 0ec7ce034..000000000 --- a/owl/Owl_regression/D/Optimise/Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_regression.D.Optimise.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Algodiff/A/Mat/index.html b/owl/Owl_regression/D/Optimise/Algodiff/A/Mat/index.html deleted file mode 100644 index 2cd811684..000000000 --- a/owl/Owl_regression/D/Optimise/Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_regression.D.Optimise.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Algodiff/A/Scalar/index.html b/owl/Owl_regression/D/Optimise/Algodiff/A/Scalar/index.html deleted file mode 100644 index 6bdb3cce1..000000000 --- a/owl/Owl_regression/D/Optimise/Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl.Owl_regression.D.Optimise.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Algodiff/A/index.html b/owl/Owl_regression/D/Optimise/Algodiff/A/index.html deleted file mode 100644 index 65c308385..000000000 --- a/owl/Owl_regression/D/Optimise/Algodiff/A/index.html +++ /dev/null @@ -1,160 +0,0 @@ - -A (owl.Owl_regression.D.Optimise.Algodiff.A)

Module Algodiff.A

val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Algodiff/Arr/index.html b/owl/Owl_regression/D/Optimise/Algodiff/Arr/index.html deleted file mode 100644 index b7f2a6ebd..000000000 --- a/owl/Owl_regression/D/Optimise/Algodiff/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl.Owl_regression.D.Optimise.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Algodiff/Builder/index.html b/owl/Owl_regression/D/Optimise/Algodiff/Builder/index.html deleted file mode 100644 index 2d64d529d..000000000 --- a/owl/Owl_regression/D/Optimise/Algodiff/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl.Owl_regression.D.Optimise.Algodiff.Builder)

Module Algodiff.Builder

module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t
module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t
module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t
module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array
module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t
module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t
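The Builder interface above is the mechanism Owl itself uses to lift a primitive operation into a differentiable one: supply the forward rules (ff_f, ff_arr), the tangent rule (df) and the adjoint rule (dr), and build_siso wires them into the computation graph. Below is a minimal sketch of a custom single-input, single-output operator f(x) = 2x, assuming this instantiation is the one exposed as Owl.Algodiff.D; the operator itself is illustrative only and not part of the library.

    (* Sketch: a custom differentiable op via Builder.build_siso; assumes Owl.Algodiff.D. *)
    open Owl
    module AD = Algodiff.D

    let double =
      AD.Builder.build_siso
        (module struct
          let label = "double"
          (* forward evaluation on a scalar and on an ndarray *)
          let ff_f a = AD.F (AD.A.Scalar.(add a a))
          let ff_arr a = AD.Arr (AD.A.(add a a))
          (* tangent rule: d(2x) = 2 dx *)
          let df _cp _ap at = AD.Maths.(at + at)
          (* adjoint rule: the output adjoint contributes 2 * ca to the input *)
          let dr _a _cp ca = AD.Maths.(!ca + !ca)
        end : AD.Builder.Siso)

    let () =
      let dydx = AD.diff double (AD.pack_flt 5.) in
      Printf.printf "d(2x)/dx at 5 = %g\n" (AD.unpack_flt dydx)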
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Aiso/index.html b/owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Aiso/index.html deleted file mode 100644 index 040ccbecd..000000000 --- a/owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl.Owl_regression.D.Optimise.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Piso/index.html b/owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Piso/index.html deleted file mode 100644 index 0b5567aca..000000000 --- a/owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl.Owl_regression.D.Optimise.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Siao/index.html b/owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Siao/index.html deleted file mode 100644 index f5d51217e..000000000 --- a/owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl.Owl_regression.D.Optimise.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Sipo/index.html b/owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Sipo/index.html deleted file mode 100644 index 1125d0f5c..000000000 --- a/owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl.Owl_regression.D.Optimise.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Siso/index.html b/owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Siso/index.html deleted file mode 100644 index dc97480ac..000000000 --- a/owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl.Owl_regression.D.Optimise.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Sito/index.html b/owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Sito/index.html deleted file mode 100644 index 67e446a55..000000000 --- a/owl/Owl_regression/D/Optimise/Algodiff/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl.Owl_regression.D.Optimise.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Algodiff/Linalg/index.html b/owl/Owl_regression/D/Optimise/Algodiff/Linalg/index.html deleted file mode 100644 index 59b3f35a1..000000000 --- a/owl/Owl_regression/D/Optimise/Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_regression.D.Optimise.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t
val logdet : t -> t
val chol : ?upper:bool -> t -> t
val qr : t -> t * t
val lq : t -> t * t
val svd : ?thin:bool -> t -> t * t * t
val sylvester : t -> t -> t -> t
val lyapunov : t -> t -> t
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> t -> t -> t
val (/@) : t -> t -> t
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> t -> t -> t
val care : ?diag_r:bool -> t -> t -> t -> t -> t
val dare : ?diag_r:bool -> t -> t -> t -> t -> t
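These linear-algebra routines operate directly on Algodiff values, so a factorisation or solve stays in the computation graph and remains differentiable. A small sketch of linsolve, assuming this module is reachable as Owl.Algodiff.D:

    (* Sketch: solve A x = b on Algodiff values. *)
    open Owl
    module AD = Algodiff.D

    let () =
      let a = AD.Mat.eye 3 in
      let b = AD.Mat.ones 3 1 in
      let x = AD.Linalg.linsolve a b in
      Printf.printf "x is %i x %i\n" (AD.row_num x) (AD.col_num x)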
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Algodiff/Mat/index.html b/owl/Owl_regression/D/Optimise/Algodiff/Mat/index.html deleted file mode 100644 index 5a2d633be..000000000 --- a/owl/Owl_regression/D/Optimise/Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_regression.D.Optimise.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Algodiff/Maths/index.html b/owl/Owl_regression/D/Optimise/Algodiff/Maths/index.html deleted file mode 100644 index 210f10f81..000000000 --- a/owl/Owl_regression/D/Optimise/Algodiff/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl.Owl_regression.D.Optimise.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t
val (-) : t -> t -> t
val (*) : t -> t -> t
val (/) : t -> t -> t
val (*@) : t -> t -> t
val (**) : t -> t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val kron : t -> t -> t
val dot : t -> t -> t
val pow : t -> t -> t
val atan2 : t -> t -> t
val min2 : t -> t -> t
val max2 : t -> t -> t
val cross_entropy : t -> t -> t
val inv : t -> t
val neg : t -> t
val abs : t -> t
val signum : t -> t
val floor : t -> t
val ceil : t -> t
val round : t -> t
val sqr : t -> t
val sqrt : t -> t
val log : t -> t
val log2 : t -> t
val log10 : t -> t
val exp : t -> t
val sin : t -> t
val cos : t -> t
val tan : t -> t
val sinh : t -> t
val cosh : t -> t
val tanh : t -> t
val asin : t -> t
val acos : t -> t
val atan : t -> t
val asinh : t -> t
val acosh : t -> t
val atanh : t -> t
val sum' : t -> t
val log_sum_exp' : t -> t
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t
val sum : ?axis:int -> ?keep_dims:bool -> t -> t
val sum_reduce : ?axis:int array -> t -> t
val mean : t -> t
val transpose : ?axis:int array -> t -> t
val swap : int -> int -> t -> t
val l1norm' : t -> t
val l2norm' : t -> t
val l2norm_sqr' : t -> t
val sigmoid : t -> t
val relu : t -> t
val dawsn : t -> t
val softplus : t -> t
val softsign : t -> t
val softmax : ?axis:int -> t -> t
val reshape : t -> int array -> t
val flatten : t -> t
val get_item : t -> int -> int -> t
val get_row : t -> int -> t
val concat : axis:int -> t -> t -> t
val split : axis:int -> int array -> t -> t array
val of_arrays : t array array -> t
val to_arrays : t -> t array array
val concatenate : axis:int -> t array -> t
val stack : axis:int -> t array -> t
val get_slice : int list list -> t -> t
val set_slice : int list list -> t -> t -> t
val get_fancy : Owl_types.index list -> t -> t
val set_fancy : Owl_types.index list -> t -> t -> t
val diag : ?k:int -> t -> t
val diagm : ?k:int -> t -> t
val trace : t -> t
val triu : ?k:int -> t -> t
val tril : ?k:int -> t -> t
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Algodiff/NN/index.html b/owl/Owl_regression/D/Optimise/Algodiff/NN/index.html deleted file mode 100644 index 9024f59e0..000000000 --- a/owl/Owl_regression/D/Optimise/Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl.Owl_regression.D.Optimise.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t
val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val upsampling2d : t -> int array -> t
val pad : ?v:A.elt -> int list list -> t -> t
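The NN helpers mirror the array-level primitives in A but act on Algodiff values, so gradients flow through convolution and pooling. A sketch assuming the usual Owl layout (input batch x h x w x channels, kernel kh x kw x in x out) and that this module is exposed as Owl.Algodiff.D:

    (* Sketch: convolution followed by max pooling on Algodiff values. *)
    open Owl
    module AD = Algodiff.D

    let () =
      let x = AD.pack_arr (AD.A.uniform [| 1; 8; 8; 1 |]) in   (* batch; h; w; channels *)
      let k = AD.pack_arr (AD.A.uniform [| 3; 3; 1; 4 |]) in   (* kh; kw; in; out *)
      let y = AD.NN.conv2d ~padding:Owl_types.SAME x k [| 1; 1 |] in
      let z = AD.NN.max_pool2d Owl_types.VALID y [| 2; 2 |] [| 2; 2 |] in
      Array.iter (Printf.printf "%i ") (AD.shape z);
      print_newline ()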
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Algodiff/index.html b/owl/Owl_regression/D/Optimise/Algodiff/index.html deleted file mode 100644 index 3901d63c4..000000000 --- a/owl/Owl_regression/D/Optimise/Algodiff/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Algodiff (owl.Owl_regression.D.Optimise.Algodiff)

Module Optimise.Algodiff

module A : sig ... end
type t = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Algodiff.t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
val tag : unit -> int
val primal : t -> t
val primal' : t -> t
val zero : t -> t
val reset_zero : t -> t
val tangent : t -> t
val adjref : t -> t Stdlib.ref
val adjval : t -> t
val shape : t -> int array
val is_float : t -> bool
val is_arr : t -> bool
val row_num : t -> int
val col_num : t -> int
val numel : t -> int
val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t
val clip_by_l2norm : A.elt -> t -> t
val copy_primal' : t -> t
val tile : t -> int array -> t
val repeat : t -> int array -> t
val pack_elt : A.elt -> t
val unpack_elt : t -> A.elt
val pack_flt : float -> t
val _f : float -> t
val unpack_flt : t -> float
val pack_arr : A.arr -> t
val unpack_arr : t -> A.arr
val deep_info : t -> string
val type_info : t -> string
val error_binop : string -> t -> t -> 'a
val error_uniop : string -> t -> 'a
val make_forward : t -> t -> int -> t
val make_reverse : t -> int -> t
val reverse_prop : t -> t -> unit
val diff : (t -> t) -> t -> t
val diff' : (t -> t) -> t -> t * t
val grad : (t -> t) -> t -> t
val grad' : (t -> t) -> t -> t * t
val jacobian : (t -> t) -> t -> t
val jacobian' : (t -> t) -> t -> t * t
val jacobianv : (t -> t) -> t -> t -> t
val jacobianv' : (t -> t) -> t -> t -> t * t
val jacobianTv : (t -> t) -> t -> t -> t
val jacobianTv' : (t -> t) -> t -> t -> t * t
val hessian : (t -> t) -> t -> t
val hessian' : (t -> t) -> t -> t * t
val hessianv : (t -> t) -> t -> t -> t
val hessianv' : (t -> t) -> t -> t -> t * t
val laplacian : (t -> t) -> t -> t
val laplacian' : (t -> t) -> t -> t * t
val gradhessian : (t -> t) -> t -> t * t
val gradhessian' : (t -> t) -> t -> t * t * t
val gradhessianv : (t -> t) -> t -> t -> t * t
val gradhessianv' : (t -> t) -> t -> t -> t * t * t
module Builder : sig ... end
module Maths : sig ... end
module Linalg : sig ... end
module NN : sig ... end
module Mat : sig ... end
module Arr : sig ... end
val to_trace : t list -> string
val to_dot : t list -> string
val pp_num : Stdlib.Format.formatter -> t -> unit
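Together the operators above form the user-facing differentiation API: wrap a value with pack_flt or pack_arr, build the function from the Maths/Linalg/NN combinators, and apply diff, grad, jacobian or hessian. A minimal sketch, assuming this instantiation is the one exposed as Owl.Algodiff.D:

    (* Sketch: value and derivative of f(x) = x * sin x at x = 2. *)
    open Owl
    module AD = Algodiff.D

    let () =
      let f x = AD.Maths.(x * sin x) in
      let y, dy = AD.diff' f (AD.pack_flt 2.) in
      Printf.printf "f(2) = %g, f'(2) = %g\n" (AD.unpack_flt y) (AD.unpack_flt dy)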
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Batch/index.html b/owl/Owl_regression/D/Optimise/Batch/index.html deleted file mode 100644 index d0a2d13ea..000000000 --- a/owl/Owl_regression/D/Optimise/Batch/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Batch (owl.Owl_regression.D.Optimise.Batch)

Module Optimise.Batch

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Batch.typ =
  1. | Full
  2. | Mini of int
  3. | Sample of int
  4. | Stochastic
val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val batches : typ -> Algodiff.t -> int
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Checkpoint/index.html b/owl/Owl_regression/D/Optimise/Checkpoint/index.html deleted file mode 100644 index a01f6612a..000000000 --- a/owl/Owl_regression/D/Optimise/Checkpoint/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Checkpoint (owl.Owl_regression.D.Optimise.Checkpoint)

Module Optimise.Checkpoint

type state = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Checkpoint.state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}
type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Checkpoint.typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of state -> unit
  4. | None
val init_state : int -> float -> state
val default_checkpoint_fun : (string -> 'a) -> 'a
val print_state_info : state -> unit
val print_summary : state -> unit
val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit
val to_string : typ -> string
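A Custom checkpoint receives the mutable state record during the optimisation loop, which is the hook for progress reporting or early stopping; setting the stop field ends the run. A sketch using the field names listed above, assuming the module is reachable as Owl.Optimise.D.Checkpoint:

    (* Sketch: a checkpoint callback that prints progress; pass it to Params.config as ~checkpoint. *)
    open Owl
    module O = Optimise.D

    let progress =
      O.Checkpoint.Custom
        (fun s ->
          let open O.Checkpoint in
          Printf.printf "batch %i / %i\n%!" s.current_batch s.batches;
          if s.current_batch >= s.batches then s.stop <- true)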
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Clipping/index.html b/owl/Owl_regression/D/Optimise/Clipping/index.html deleted file mode 100644 index e98d8b9e9..000000000 --- a/owl/Owl_regression/D/Optimise/Clipping/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Clipping (owl.Owl_regression.D.Optimise.Clipping)

Module Optimise.Clipping

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Clipping.typ =
  1. | L2norm of float
  2. | Value of float * float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Gradient/index.html b/owl/Owl_regression/D/Optimise/Gradient/index.html deleted file mode 100644 index 3448b9dd9..000000000 --- a/owl/Owl_regression/D/Optimise/Gradient/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Gradient (owl.Owl_regression.D.Optimise.Gradient)

Module Optimise.Gradient

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Gradient.typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton
val run : typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Learning_Rate/index.html b/owl/Owl_regression/D/Optimise/Learning_Rate/index.html deleted file mode 100644 index 2baf2abbb..000000000 --- a/owl/Owl_regression/D/Optimise/Learning_Rate/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Learning_Rate (owl.Owl_regression.D.Optimise.Learning_Rate)

Module Optimise.Learning_Rate

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Learning_Rate.typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t
val default : typ -> typ
val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Loss/index.html b/owl/Owl_regression/D/Optimise/Loss/index.html deleted file mode 100644 index 445d1fea6..000000000 --- a/owl/Owl_regression/D/Optimise/Loss/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Loss (owl.Owl_regression.D.Optimise.Loss)

Module Optimise.Loss

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Loss.typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of Algodiff.t -> Algodiff.t -> Algodiff.t
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Momentum/index.html b/owl/Owl_regression/D/Optimise/Momentum/index.html deleted file mode 100644 index a7b545fe0..000000000 --- a/owl/Owl_regression/D/Optimise/Momentum/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Momentum (owl.Owl_regression.D.Optimise.Momentum)

Module Optimise.Momentum

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Momentum.typ =
  1. | Standard of float
  2. | Nesterov of float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Params/index.html b/owl/Owl_regression/D/Optimise/Params/index.html deleted file mode 100644 index 34a19c301..000000000 --- a/owl/Owl_regression/D/Optimise/Params/index.html +++ /dev/null @@ -1,16 +0,0 @@ - -Params (owl.Owl_regression.D.Optimise.Params)

Module Optimise.Params

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Params.typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}
val default : unit -> typ
val config : ?batch:Batch.typ -> ?gradient:Gradient.typ -> ?loss:Loss.typ -> ?learning_rate:Learning_Rate.typ -> ?regularisation:Regularisation.typ -> ?momentum:Momentum.typ -> ?clipping:Clipping.typ -> ?stopping:Stopping.typ -> ?checkpoint:Checkpoint.typ -> ?verbosity:bool -> float -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Regularisation/index.html b/owl/Owl_regression/D/Optimise/Regularisation/index.html deleted file mode 100644 index 0e509a6a9..000000000 --- a/owl/Owl_regression/D/Optimise/Regularisation/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Regularisation (owl.Owl_regression.D.Optimise.Regularisation)

Module Optimise.Regularisation

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.D)).Regularisation.typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None
val run : typ -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Stopping/index.html b/owl/Owl_regression/D/Optimise/Stopping/index.html deleted file mode 100644 index 383d091e1..000000000 --- a/owl/Owl_regression/D/Optimise/Stopping/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Stopping (owl.Owl_regression.D.Optimise.Stopping)

Module Optimise.Stopping

val run : typ -> float -> bool
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/Utils/index.html b/owl/Owl_regression/D/Optimise/Utils/index.html deleted file mode 100644 index b81d899ac..000000000 --- a/owl/Owl_regression/D/Optimise/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl.Owl_regression.D.Optimise.Utils)

Module Optimise.Utils

val sample_num : Algodiff.t -> int
val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val get_chunk : Algodiff.t -> Algodiff.t -> int -> int -> Algodiff.t * Algodiff.t
\ No newline at end of file diff --git a/owl/Owl_regression/D/Optimise/index.html b/owl/Owl_regression/D/Optimise/index.html deleted file mode 100644 index ee8456a13..000000000 --- a/owl/Owl_regression/D/Optimise/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -Optimise (owl.Owl_regression.D.Optimise)

Module D.Optimise

module Algodiff : sig ... end
module Utils : sig ... end
module Learning_Rate : sig ... end
module Batch : sig ... end
module Loss : sig ... end
module Gradient : sig ... end
module Momentum : sig ... end
module Regularisation : sig ... end
module Clipping : sig ... end
module Stopping : sig ... end
module Checkpoint : sig ... end
module Params : sig ... end
val minimise_weight : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> (Algodiff.t array array -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
val minimise_fun : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_compiled_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> (unit -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
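Of these entry points, minimise_fun is the simplest: it takes a Params record, a scalar objective built from Algodiff values, and a starting point, and returns the checkpoint state together with the optimised value. A sketch assuming this instantiation matches Owl.Optimise.D:

    (* Sketch: minimise (x - 3)^2 with plain gradient descent. *)
    open Owl
    module O = Optimise.D
    module AD = O.Algodiff

    let () =
      let f x = AD.Maths.(sqr (x - AD.pack_flt 3.)) in
      let params =
        O.Params.config
          ~gradient:O.Gradient.GD
          ~learning_rate:(O.Learning_Rate.Const 0.1)
          ~verbosity:false
          50.                                      (* epochs *)
      in
      let _state, x_min = O.minimise_fun params f (AD.pack_flt 0.) in
      Printf.printf "argmin near %g\n" (AD.unpack_flt x_min)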
\ No newline at end of file diff --git a/owl/Owl_regression/D/index.html b/owl/Owl_regression/D/index.html deleted file mode 100644 index 39f4a5daa..000000000 --- a/owl/Owl_regression/D/index.html +++ /dev/null @@ -1,44 +0,0 @@ - -D (owl.Owl_regression.D)

Module Owl_regression.D

module Optimise : sig ... end
val ridge : ?i:bool -> ?alpha:float -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr array
val lasso : ?i:bool -> ?alpha:float -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr array
val elastic_net : ?i:bool -> ?alpha:float -> ?l1_ratio:float -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr array
val svm : ?i:bool -> ?a:float -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr array
val logistic : ?i:bool -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr array
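These regression routines take plain dense arrays (Optimise.Algodiff.A.arr) rather than Algodiff values and return the fitted parameters as an array of arrays (typically the weights, plus an intercept when ~i:true). A sketch assuming Owl.Regression.D and that Owl's double-precision matrices share their underlying type with these arrays:

    (* Sketch: ridge regression on synthetic data. *)
    open Owl

    let () =
      let x = Mat.uniform 100 3 in
      let w_true = Mat.of_array [| 1.; -2.; 0.5 |] 3 1 in
      let y = Mat.(x *@ w_true) in
      let w = Regression.D.ridge ~i:false ~alpha:0.001 x y in
      Mat.print w.(0)   (* fitted coefficients, close to w_true *)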
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/A/Linalg/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/A/Linalg/index.html deleted file mode 100644 index 8d0d5ecbd..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_regression.Make_Embedded.Optimise.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/A/Mat/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/A/Mat/index.html deleted file mode 100644 index b44306f37..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_regression.Make_Embedded.Optimise.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/A/Scalar/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/A/Scalar/index.html deleted file mode 100644 index a828e2750..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl.Owl_regression.Make_Embedded.Optimise.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/A/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/A/index.html deleted file mode 100644 index dce9eb9e8..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/A/index.html +++ /dev/null @@ -1,160 +0,0 @@ - -A (owl.Owl_regression.Make_Embedded.Optimise.Algodiff.A)

Module Algodiff.A

val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Arr/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Arr/index.html deleted file mode 100644 index 2372a0f2c..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl.Owl_regression.Make_Embedded.Optimise.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/index.html deleted file mode 100644 index d6ab790c3..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl.Owl_regression.Make_Embedded.Optimise.Algodiff.Builder)

Module Algodiff.Builder

module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t
module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t
module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t
module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array
module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t
module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Aiso/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Aiso/index.html deleted file mode 100644 index 5f171bb0f..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl.Owl_regression.Make_Embedded.Optimise.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Piso/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Piso/index.html deleted file mode 100644 index 63a176a0c..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl.Owl_regression.Make_Embedded.Optimise.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Siao/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Siao/index.html deleted file mode 100644 index d7376966a..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl.Owl_regression.Make_Embedded.Optimise.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Sipo/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Sipo/index.html deleted file mode 100644 index 3e8137807..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl.Owl_regression.Make_Embedded.Optimise.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Siso/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Siso/index.html deleted file mode 100644 index 0662bf1a2..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl.Owl_regression.Make_Embedded.Optimise.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Sito/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Sito/index.html deleted file mode 100644 index fb5a55b00..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl.Owl_regression.Make_Embedded.Optimise.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Linalg/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Linalg/index.html deleted file mode 100644 index 82c14a27c..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_regression.Make_Embedded.Optimise.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t
val logdet : t -> t
val chol : ?upper:bool -> t -> t
val qr : t -> t * t
val lq : t -> t * t
val svd : ?thin:bool -> t -> t * t * t
val sylvester : t -> t -> t -> t
val lyapunov : t -> t -> t
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> t -> t -> t
val (/@) : t -> t -> t
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> t -> t -> t
val care : ?diag_r:bool -> t -> t -> t -> t -> t
val dare : ?diag_r:bool -> t -> t -> t -> t -> t
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Mat/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Mat/index.html deleted file mode 100644 index 711d7f85a..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_regression.Make_Embedded.Optimise.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Maths/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Maths/index.html deleted file mode 100644 index 6b7d01fbd..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl.Owl_regression.Make_Embedded.Optimise.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t
val (-) : t -> t -> t
val (*) : t -> t -> t
val (/) : t -> t -> t
val (*@) : t -> t -> t
val (**) : t -> t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val kron : t -> t -> t
val dot : t -> t -> t
val pow : t -> t -> t
val atan2 : t -> t -> t
val min2 : t -> t -> t
val max2 : t -> t -> t
val cross_entropy : t -> t -> t
val inv : t -> t
val neg : t -> t
val abs : t -> t
val signum : t -> t
val floor : t -> t
val ceil : t -> t
val round : t -> t
val sqr : t -> t
val sqrt : t -> t
val log : t -> t
val log2 : t -> t
val log10 : t -> t
val exp : t -> t
val sin : t -> t
val cos : t -> t
val tan : t -> t
val sinh : t -> t
val cosh : t -> t
val tanh : t -> t
val asin : t -> t
val acos : t -> t
val atan : t -> t
val asinh : t -> t
val acosh : t -> t
val atanh : t -> t
val sum' : t -> t
val log_sum_exp' : t -> t
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t
val sum : ?axis:int -> ?keep_dims:bool -> t -> t
val sum_reduce : ?axis:int array -> t -> t
val mean : t -> t
val transpose : ?axis:int array -> t -> t
val swap : int -> int -> t -> t
val l1norm' : t -> t
val l2norm' : t -> t
val l2norm_sqr' : t -> t
val sigmoid : t -> t
val relu : t -> t
val dawsn : t -> t
val softplus : t -> t
val softsign : t -> t
val softmax : ?axis:int -> t -> t
val reshape : t -> int array -> t
val flatten : t -> t
val get_item : t -> int -> int -> t
val get_row : t -> int -> t
val concat : axis:int -> t -> t -> t
val split : axis:int -> int array -> t -> t array
val of_arrays : t array array -> t
val to_arrays : t -> t array array
val concatenate : axis:int -> t array -> t
val stack : axis:int -> t array -> t
val get_slice : int list list -> t -> t
val set_slice : int list list -> t -> t -> t
val get_fancy : Owl_types.index list -> t -> t
val set_fancy : Owl_types.index list -> t -> t -> t
val diag : ?k:int -> t -> t
val diagm : ?k:int -> t -> t
val trace : t -> t
val triu : ?k:int -> t -> t
val tril : ?k:int -> t -> t
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/NN/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/NN/index.html deleted file mode 100644 index b5cbdfbd5..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl.Owl_regression.Make_Embedded.Optimise.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t
val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val upsampling2d : t -> int array -> t
val pad : ?v:A.elt -> int list list -> t -> t
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/index.html deleted file mode 100644 index bcc9e6632..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Algodiff/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Algodiff (owl.Owl_regression.Make_Embedded.Optimise.Algodiff)

Module Optimise.Algodiff

module A : sig ... end
type t = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Algodiff.t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
val tag : unit -> int
val primal : t -> t
val primal' : t -> t
val zero : t -> t
val reset_zero : t -> t
val tangent : t -> t
val adjref : t -> t Stdlib.ref
val adjval : t -> t
val shape : t -> int array
val is_float : t -> bool
val is_arr : t -> bool
val row_num : t -> int
val col_num : t -> int
val numel : t -> int
val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t
val clip_by_l2norm : A.elt -> t -> t
val copy_primal' : t -> t
val tile : t -> int array -> t
val repeat : t -> int array -> t
val pack_elt : A.elt -> t
val unpack_elt : t -> A.elt
val pack_flt : float -> t
val _f : float -> t
val unpack_flt : t -> float
val pack_arr : A.arr -> t
val unpack_arr : t -> A.arr
val deep_info : t -> string
val type_info : t -> string
val error_binop : string -> t -> t -> 'a
val error_uniop : string -> t -> 'a
val make_forward : t -> t -> int -> t
val make_reverse : t -> int -> t
val reverse_prop : t -> t -> unit
val diff : (t -> t) -> t -> t
val diff' : (t -> t) -> t -> t * t
val grad : (t -> t) -> t -> t
val grad' : (t -> t) -> t -> t * t
val jacobian : (t -> t) -> t -> t
val jacobian' : (t -> t) -> t -> t * t
val jacobianv : (t -> t) -> t -> t -> t
val jacobianv' : (t -> t) -> t -> t -> t * t
val jacobianTv : (t -> t) -> t -> t -> t
val jacobianTv' : (t -> t) -> t -> t -> t * t
val hessian : (t -> t) -> t -> t
val hessian' : (t -> t) -> t -> t * t
val hessianv : (t -> t) -> t -> t -> t
val hessianv' : (t -> t) -> t -> t -> t * t
val laplacian : (t -> t) -> t -> t
val laplacian' : (t -> t) -> t -> t * t
val gradhessian : (t -> t) -> t -> t * t
val gradhessian' : (t -> t) -> t -> t * t * t
val gradhessianv : (t -> t) -> t -> t -> t * t
val gradhessianv' : (t -> t) -> t -> t -> t * t * t
module Builder : sig ... end
module Maths : sig ... end
module Linalg : sig ... end
module NN : sig ... end
module Mat : sig ... end
module Arr : sig ... end
val to_trace : t list -> string
val to_dot : t list -> string
val pp_num : Stdlib.Format.formatter -> t -> unit
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Batch/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Batch/index.html deleted file mode 100644 index 935048478..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Batch/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Batch (owl.Owl_regression.Make_Embedded.Optimise.Batch)

Module Optimise.Batch

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Batch.typ =
  1. | Full
  2. | Mini of int
  3. | Sample of int
  4. | Stochastic
val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val batches : typ -> Algodiff.t -> int
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Checkpoint/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Checkpoint/index.html deleted file mode 100644 index 79e130d1b..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Checkpoint/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Checkpoint (owl.Owl_regression.Make_Embedded.Optimise.Checkpoint)

Module Optimise.Checkpoint

type state = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Checkpoint.state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}
type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Checkpoint.typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of state -> unit
  4. | None
val init_state : int -> float -> state
val default_checkpoint_fun : (string -> 'a) -> 'a
val print_state_info : state -> unit
val print_summary : state -> unit
val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Clipping/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Clipping/index.html deleted file mode 100644 index 44d3c5552..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Clipping/index.html +++ /dev/null @@ -1,3 +0,0 @@ - -Clipping (owl.Owl_regression.Make_Embedded.Optimise.Clipping)

Module Optimise.Clipping

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Clipping.typ =
  1. | L2norm of float
  2. | Value of float * float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Gradient/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Gradient/index.html deleted file mode 100644 index 82ef30b89..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Gradient/index.html +++ /dev/null @@ -1,10 +0,0 @@ - -Gradient (owl.Owl_regression.Make_Embedded.Optimise.Gradient)

Module Optimise.Gradient

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Gradient.typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton
val run : typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Learning_Rate/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Learning_Rate/index.html deleted file mode 100644 index dd1b4e08a..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Learning_Rate/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Learning_Rate (owl.Owl_regression.Make_Embedded.Optimise.Learning_Rate)

Module Optimise.Learning_Rate

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Learning_Rate.typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t
val default : typ -> typ
val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Loss/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Loss/index.html deleted file mode 100644 index 5bdebc1ed..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Loss/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Loss (owl.Owl_regression.Make_Embedded.Optimise.Loss)

Module Optimise.Loss

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Loss.typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of Algodiff.t -> Algodiff.t -> Algodiff.t
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Momentum/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Momentum/index.html deleted file mode 100644 index f6d09bfbb..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Momentum/index.html +++ /dev/null @@ -1,3 +0,0 @@ - -Momentum (owl.Owl_regression.Make_Embedded.Optimise.Momentum)

Module Optimise.Momentum

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Momentum.typ =
  1. | Standard of float
  2. | Nesterov of float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Params/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Params/index.html deleted file mode 100644 index 5eb8e5d30..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Params/index.html +++ /dev/null @@ -1,14 +0,0 @@ - -Params (owl.Owl_regression.Make_Embedded.Optimise.Params)

Module Optimise.Params

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Params.typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}
val default : unit -> typ
val config : ?batch:Batch.typ -> ?gradient:Gradient.typ -> ?loss:Loss.typ -> ?learning_rate:Learning_Rate.typ -> ?regularisation:Regularisation.typ -> ?momentum:Momentum.typ -> ?clipping:Clipping.typ -> ?stopping:Stopping.typ -> ?checkpoint:Checkpoint.typ -> ?verbosity:bool -> float -> typ
val to_string : typ -> string
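config takes the optional fields above plus the number of epochs as its final float argument; anything omitted keeps the value produced by default (). A minimal sketch with illustrative hyper-parameter values only:

(* 50 epochs, mini-batches of 100, Adam with standard betas,
   quadratic loss, and a small absolute stopping threshold *)
let params =
  Params.config
    ~batch:(Batch.Mini 100)
    ~learning_rate:(Learning_Rate.Adam (0.001, 0.9, 0.999))
    ~loss:Loss.Quadratic
    ~stopping:(Stopping.Const 1e-6)
    50.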
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Regularisation/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Regularisation/index.html deleted file mode 100644 index 5263ecdde..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Regularisation/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Regularisation (owl.Owl_regression.Make_Embedded.Optimise.Regularisation)

Module Optimise.Regularisation

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Regularisation.typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None
val run : typ -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Stopping/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Stopping/index.html deleted file mode 100644 index d38319f97..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Stopping/index.html +++ /dev/null @@ -1,3 +0,0 @@ - -Stopping (owl.Owl_regression.Make_Embedded.Optimise.Stopping)

Module Optimise.Stopping

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(A)).Stopping.typ =
  1. | Const of float
  2. | Early of int * int
  3. | None
val run : typ -> float -> bool
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/Utils/index.html b/owl/Owl_regression/Make_Embedded/Optimise/Utils/index.html deleted file mode 100644 index 63fbfd4d3..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl.Owl_regression.Make_Embedded.Optimise.Utils)

Module Optimise.Utils

val sample_num : Algodiff.t -> int
val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val get_chunk : Algodiff.t -> Algodiff.t -> int -> int -> Algodiff.t * Algodiff.t
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/Optimise/index.html b/owl/Owl_regression/Make_Embedded/Optimise/index.html deleted file mode 100644 index f0239127c..000000000 --- a/owl/Owl_regression/Make_Embedded/Optimise/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -Optimise (owl.Owl_regression.Make_Embedded.Optimise)

Module Make_Embedded.Optimise

module Algodiff : sig ... end
module Utils : sig ... end
module Learning_Rate : sig ... end
module Batch : sig ... end
module Loss : sig ... end
module Gradient : sig ... end
module Momentum : sig ... end
module Regularisation : sig ... end
module Clipping : sig ... end
module Stopping : sig ... end
module Checkpoint : sig ... end
module Params : sig ... end
val minimise_weight : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> (Algodiff.t array array -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
val minimise_fun : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_compiled_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> (unit -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
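Of these entry points, minimise_fun is the simplest: it needs only a Params.typ, a differentiable objective of type Algodiff.t -> Algodiff.t and a starting point, and returns the final checkpoint state together with the argument that minimises the objective. A minimal sketch; the objective and settings are illustrative:

(* minimise f(x) = (x - 3)^2 from x = 0 with a constant learning rate *)
let f x = Algodiff.Maths.(sqr (x - Algodiff.pack_flt 3.))
let params = Params.config ~learning_rate:(Learning_Rate.Const 0.1) 100.
let _state, x_min = minimise_fun params f (Algodiff.pack_flt 0.)
(* Algodiff.unpack_flt x_min should now be close to 3. *)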
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/argument-1-A/Linalg/index.html b/owl/Owl_regression/Make_Embedded/argument-1-A/Linalg/index.html deleted file mode 100644 index 884b66bf3..000000000 --- a/owl/Owl_regression/Make_Embedded/argument-1-A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_regression.Make_Embedded.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/argument-1-A/Mat/index.html b/owl/Owl_regression/Make_Embedded/argument-1-A/Mat/index.html deleted file mode 100644 index 446b3adaa..000000000 --- a/owl/Owl_regression/Make_Embedded/argument-1-A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_regression.Make_Embedded.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/argument-1-A/Scalar/index.html b/owl/Owl_regression/Make_Embedded/argument-1-A/Scalar/index.html deleted file mode 100644 index fe2f5f8c7..000000000 --- a/owl/Owl_regression/Make_Embedded/argument-1-A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl.Owl_regression.Make_Embedded.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/argument-1-A/index.html b/owl/Owl_regression/Make_Embedded/argument-1-A/index.html deleted file mode 100644 index 3b238c227..000000000 --- a/owl/Owl_regression/Make_Embedded/argument-1-A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl.Owl_regression.Make_Embedded.A)

Parameter Make_Embedded.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
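Any ndarray module satisfying this signature can be supplied as the A parameter. A small sketch of the core array operations, assuming Owl.Dense.Ndarray.S (single precision, elt = float) as the concrete module; the values are illustrative:

module N = Owl.Dense.Ndarray.S

let x = N.uniform [| 3; 4 |]           (* 3 x 4 array, entries in [0, 1) *)
let y = N.map (fun e -> e *. 2.0) x    (* elementwise map *)
let s = N.sum' y                       (* reduce the whole array to one elt *)
let () = Printf.printf "sum = %g\n" s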
\ No newline at end of file diff --git a/owl/Owl_regression/Make_Embedded/index.html b/owl/Owl_regression/Make_Embedded/index.html deleted file mode 100644 index d1120be1f..000000000 --- a/owl/Owl_regression/Make_Embedded/index.html +++ /dev/null @@ -1,44 +0,0 @@ - -Make_Embedded (owl.Owl_regression.Make_Embedded)

Module Owl_regression.Make_Embedded

Parameters

Signature

include sig ... end
module Optimise : sig ... end
val ridge : ?i:bool -> ?alpha:float -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr array
val lasso : ?i:bool -> ?alpha:float -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr array
val elastic_net : ?i:bool -> ?alpha:float -> ?l1_ratio:float -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr array
val svm : ?i:bool -> ?a:float -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr array
val logistic : ?i:bool -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr array
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Algodiff/A/Linalg/index.html b/owl/Owl_regression/S/Optimise/Algodiff/A/Linalg/index.html deleted file mode 100644 index 908c54d69..000000000 --- a/owl/Owl_regression/S/Optimise/Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_regression.S.Optimise.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Algodiff/A/Mat/index.html b/owl/Owl_regression/S/Optimise/Algodiff/A/Mat/index.html deleted file mode 100644 index 093d8a939..000000000 --- a/owl/Owl_regression/S/Optimise/Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_regression.S.Optimise.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Algodiff/A/Scalar/index.html b/owl/Owl_regression/S/Optimise/Algodiff/A/Scalar/index.html deleted file mode 100644 index a7ece5853..000000000 --- a/owl/Owl_regression/S/Optimise/Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl.Owl_regression.S.Optimise.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Algodiff/A/index.html b/owl/Owl_regression/S/Optimise/Algodiff/A/index.html deleted file mode 100644 index 3044f4841..000000000 --- a/owl/Owl_regression/S/Optimise/Algodiff/A/index.html +++ /dev/null @@ -1,160 +0,0 @@ - -A (owl.Owl_regression.S.Optimise.Algodiff.A)

Module Algodiff.A

val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : ?max_row:int -> ?max_col:int -> ?header:bool -> ?fmt:(elt -> string) -> arr -> unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val dilated_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val dilated_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> int array -> arr
val transpose_conv1d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv2d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val transpose_conv3d : ?padding:Owl_types_common.padding -> arr -> arr -> int array -> arr
val max_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val max_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool1d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool2d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val avg_pool3d : ?padding:Owl_types_common.padding -> arr -> int array -> int array -> arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv1d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv2d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_input : arr -> arr -> int array -> int array -> arr -> arr
val dilated_conv3d_backward_kernel : arr -> arr -> int array -> int array -> arr -> arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val max_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool1d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool2d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val avg_pool3d_backward : Owl_types_common.padding -> arr -> int array -> int array -> arr -> arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Algodiff/Arr/index.html b/owl/Owl_regression/S/Optimise/Algodiff/Arr/index.html deleted file mode 100644 index 68f3317a2..000000000 --- a/owl/Owl_regression/S/Optimise/Algodiff/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl.Owl_regression.S.Optimise.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Algodiff/Builder/index.html b/owl/Owl_regression/S/Optimise/Algodiff/Builder/index.html deleted file mode 100644 index 1a54efd3d..000000000 --- a/owl/Owl_regression/S/Optimise/Algodiff/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl.Owl_regression.S.Optimise.Algodiff.Builder)

Module Algodiff.Builder

module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t
module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t
module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t
module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array
module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t
module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Aiso/index.html b/owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Aiso/index.html deleted file mode 100644 index 75a798039..000000000 --- a/owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl.Owl_regression.S.Optimise.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Piso/index.html b/owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Piso/index.html deleted file mode 100644 index df71215d3..000000000 --- a/owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl.Owl_regression.S.Optimise.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Siao/index.html b/owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Siao/index.html deleted file mode 100644 index 993c85fc4..000000000 --- a/owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl.Owl_regression.S.Optimise.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Sipo/index.html b/owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Sipo/index.html deleted file mode 100644 index 7182dc7eb..000000000 --- a/owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl.Owl_regression.S.Optimise.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Siso/index.html b/owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Siso/index.html deleted file mode 100644 index 113884789..000000000 --- a/owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl.Owl_regression.S.Optimise.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Sito/index.html b/owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Sito/index.html deleted file mode 100644 index 1b36eac7a..000000000 --- a/owl/Owl_regression/S/Optimise/Algodiff/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl.Owl_regression.S.Optimise.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Algodiff/Linalg/index.html b/owl/Owl_regression/S/Optimise/Algodiff/Linalg/index.html deleted file mode 100644 index f8f9ca42b..000000000 --- a/owl/Owl_regression/S/Optimise/Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_regression.S.Optimise.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t
val logdet : t -> t
val chol : ?upper:bool -> t -> t
val qr : t -> t * t
val lq : t -> t * t
val svd : ?thin:bool -> t -> t * t * t
val sylvester : t -> t -> t -> t
val lyapunov : t -> t -> t
val discrete_lyapunov : ?solver:[ `bilinear | `default | `direct ] -> t -> t -> t
val (/@) : t -> t -> t
val linsolve : ?trans:bool -> ?typ:[ `l | `n | `u ] -> t -> t -> t
val care : ?diag_r:bool -> t -> t -> t -> t -> t
val dare : ?diag_r:bool -> t -> t -> t -> t -> t
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Algodiff/Mat/index.html b/owl/Owl_regression/S/Optimise/Algodiff/Mat/index.html deleted file mode 100644 index 8c398a100..000000000 --- a/owl/Owl_regression/S/Optimise/Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_regression.S.Optimise.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Algodiff/Maths/index.html b/owl/Owl_regression/S/Optimise/Algodiff/Maths/index.html deleted file mode 100644 index f3d8097a3..000000000 --- a/owl/Owl_regression/S/Optimise/Algodiff/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl.Owl_regression.S.Optimise.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t
val (-) : t -> t -> t
val (*) : t -> t -> t
val (/) : t -> t -> t
val (*@) : t -> t -> t
val (**) : t -> t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val kron : t -> t -> t
val dot : t -> t -> t
val pow : t -> t -> t
val atan2 : t -> t -> t
val min2 : t -> t -> t
val max2 : t -> t -> t
val cross_entropy : t -> t -> t
val inv : t -> t
val neg : t -> t
val abs : t -> t
val signum : t -> t
val floor : t -> t
val ceil : t -> t
val round : t -> t
val sqr : t -> t
val sqrt : t -> t
val log : t -> t
val log2 : t -> t
val log10 : t -> t
val exp : t -> t
val sin : t -> t
val cos : t -> t
val tan : t -> t
val sinh : t -> t
val cosh : t -> t
val tanh : t -> t
val asin : t -> t
val acos : t -> t
val atan : t -> t
val asinh : t -> t
val acosh : t -> t
val atanh : t -> t
val sum' : t -> t
val log_sum_exp' : t -> t
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t
val sum : ?axis:int -> ?keep_dims:bool -> t -> t
val sum_reduce : ?axis:int array -> t -> t
val mean : t -> t
val transpose : ?axis:int array -> t -> t
val swap : int -> int -> t -> t
val l1norm' : t -> t
val l2norm' : t -> t
val l2norm_sqr' : t -> t
val sigmoid : t -> t
val relu : t -> t
val dawsn : t -> t
val softplus : t -> t
val softsign : t -> t
val softmax : ?axis:int -> t -> t
val reshape : t -> int array -> t
val flatten : t -> t
val get_item : t -> int -> int -> t
val get_row : t -> int -> t
val concat : axis:int -> t -> t -> t
val split : axis:int -> int array -> t -> t array
val of_arrays : t array array -> t
val to_arrays : t -> t array array
val concatenate : axis:int -> t array -> t
val stack : axis:int -> t array -> t
val get_slice : int list list -> t -> t
val set_slice : int list list -> t -> t -> t
val get_fancy : Owl_types.index list -> t -> t
val set_fancy : Owl_types.index list -> t -> t -> t
val diag : ?k:int -> t -> t
val diagm : ?k:int -> t -> t
val trace : t -> t
val triu : ?k:int -> t -> t
val tril : ?k:int -> t -> t
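Because every operator here works on the wrapped type t, a whole model can be written as one expression and differentiated afterwards. A small sketch of an affine model and a squared-error loss built only from the operators above; the names are illustrative:

(* y_hat = x *@ w + b, all values of type t *)
let model w b x = Maths.((x *@ w) + b)

(* scalar-valued loss suitable for grad or the minimise_* functions *)
let loss w b x y = Maths.(l2norm_sqr' (y - model w b x))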
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Algodiff/NN/index.html b/owl/Owl_regression/S/Optimise/Algodiff/NN/index.html deleted file mode 100644 index 6f8785764..000000000 --- a/owl/Owl_regression/S/Optimise/Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl.Owl_regression.S.Optimise.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t
val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t
val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t
val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t
val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t
val upsampling2d : t -> int array -> t
val pad : ?v:A.elt -> int list list -> t -> t
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Algodiff/index.html b/owl/Owl_regression/S/Optimise/Algodiff/index.html deleted file mode 100644 index 4527c490d..000000000 --- a/owl/Owl_regression/S/Optimise/Algodiff/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Algodiff (owl.Owl_regression.S.Optimise.Algodiff)

Module Optimise.Algodiff

module A : sig ... end
type t = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Algodiff.t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
val tag : unit -> int
val primal : t -> t
val primal' : t -> t
val zero : t -> t
val reset_zero : t -> t
val tangent : t -> t
val adjref : t -> t Stdlib.ref
val adjval : t -> t
val shape : t -> int array
val is_float : t -> bool
val is_arr : t -> bool
val row_num : t -> int
val col_num : t -> int
val numel : t -> int
val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t
val clip_by_l2norm : A.elt -> t -> t
val copy_primal' : t -> t
val tile : t -> int array -> t
val repeat : t -> int array -> t
val pack_elt : A.elt -> t
val unpack_elt : t -> A.elt
val pack_flt : float -> t
val _f : float -> t
val unpack_flt : t -> float
val pack_arr : A.arr -> t
val unpack_arr : t -> A.arr
val deep_info : t -> string
val type_info : t -> string
val error_binop : string -> t -> t -> 'a
val error_uniop : string -> t -> 'a
val make_forward : t -> t -> int -> t
val make_reverse : t -> int -> t
val reverse_prop : t -> t -> unit
val diff : (t -> t) -> t -> t
val diff' : (t -> t) -> t -> t * t
val grad : (t -> t) -> t -> t
val grad' : (t -> t) -> t -> t * t
val jacobian : (t -> t) -> t -> t
val jacobian' : (t -> t) -> t -> t * t
val jacobianv : (t -> t) -> t -> t -> t
val jacobianv' : (t -> t) -> t -> t -> t * t
val jacobianTv : (t -> t) -> t -> t -> t
val jacobianTv' : (t -> t) -> t -> t -> t * t
val hessian : (t -> t) -> t -> t
val hessian' : (t -> t) -> t -> t * t
val hessianv : (t -> t) -> t -> t -> t
val hessianv' : (t -> t) -> t -> t -> t * t
val laplacian : (t -> t) -> t -> t
val laplacian' : (t -> t) -> t -> t * t
val gradhessian : (t -> t) -> t -> t * t
val gradhessian' : (t -> t) -> t -> t * t * t
val gradhessianv : (t -> t) -> t -> t -> t * t
val gradhessianv' : (t -> t) -> t -> t -> t * t * t
module Builder : sig ... end
module Maths : sig ... end
module Linalg : sig ... end
module NN : sig ... end
module Mat : sig ... end
module Arr : sig ... end
val to_trace : t list -> string
val to_dot : t list -> string
val pp_num : Stdlib.Format.formatter -> t -> unit
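diff, grad, jacobian and hessian are the operators the optimiser above is built on. A minimal sketch of their use through this module's path in the library (the same instantiation of Owl_algodiff_generic.Make named in the type equation above); the functions and inputs are illustrative:

open Owl_regression.S.Optimise.Algodiff

(* f x = sin x + x^2 and its derivative at x = 1 *)
let f x = Maths.(sin x + sqr x)
let dfdx = diff f (pack_flt 1.)
let () = Printf.printf "f'(1) = %g\n" (unpack_flt dfdx)

(* gradient of a scalar-valued function of a matrix argument *)
let g x = Maths.(l2norm_sqr' x)
let gx = grad g (Mat.uniform 3 3)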
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Batch/index.html b/owl/Owl_regression/S/Optimise/Batch/index.html deleted file mode 100644 index 996168421..000000000 --- a/owl/Owl_regression/S/Optimise/Batch/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Batch (owl.Owl_regression.S.Optimise.Batch)

Module Optimise.Batch

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Batch.typ =
  1. | Full
  2. | Mini of int
  3. | Sample of int
  4. | Stochastic
val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val batches : typ -> Algodiff.t -> int
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Checkpoint/index.html b/owl/Owl_regression/S/Optimise/Checkpoint/index.html deleted file mode 100644 index a9d1cae6f..000000000 --- a/owl/Owl_regression/S/Optimise/Checkpoint/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Checkpoint (owl.Owl_regression.S.Optimise.Checkpoint)

Module Optimise.Checkpoint

type state = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Checkpoint.state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}
type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Checkpoint.typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of state -> unit
  4. | None
val init_state : int -> float -> state
val default_checkpoint_fun : (string -> 'a) -> 'a
val print_state_info : state -> unit
val print_summary : state -> unit
val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit
val to_string : typ -> string
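Besides the fixed Batch and Epoch intervals, Custom installs a callback that receives the mutable state record above, so it can log progress or request an early stop by flipping the stop field. A small sketch; the threshold and behaviour are illustrative assumptions, not from the source:

(* ask the optimiser to stop once 1000 batches have been processed *)
let my_ckpt =
  Checkpoint.Custom
    (fun s ->
      if s.Checkpoint.current_batch >= 1000 then s.Checkpoint.stop <- true)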
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Clipping/index.html b/owl/Owl_regression/S/Optimise/Clipping/index.html deleted file mode 100644 index 32a3bf15b..000000000 --- a/owl/Owl_regression/S/Optimise/Clipping/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Clipping (owl.Owl_regression.S.Optimise.Clipping)

Module Optimise.Clipping

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Clipping.typ =
  1. | L2norm of float
  2. | Value of float * float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Gradient/index.html b/owl/Owl_regression/S/Optimise/Gradient/index.html deleted file mode 100644 index e3a3aa1e7..000000000 --- a/owl/Owl_regression/S/Optimise/Gradient/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Gradient (owl.Owl_regression.S.Optimise.Gradient)

Module Optimise.Gradient

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Gradient.typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton
val run : typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Learning_Rate/index.html b/owl/Owl_regression/S/Optimise/Learning_Rate/index.html deleted file mode 100644 index 34b05f9c2..000000000 --- a/owl/Owl_regression/S/Optimise/Learning_Rate/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Learning_Rate (owl.Owl_regression.S.Optimise.Learning_Rate)

Module Optimise.Learning_Rate

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Learning_Rate.typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t
val default : typ -> typ
val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Loss/index.html b/owl/Owl_regression/S/Optimise/Loss/index.html deleted file mode 100644 index bead12612..000000000 --- a/owl/Owl_regression/S/Optimise/Loss/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Loss (owl.Owl_regression.S.Optimise.Loss)

Module Optimise.Loss

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Loss.typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of Algodiff.t -> Algodiff.t -> Algodiff.t
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Momentum/index.html b/owl/Owl_regression/S/Optimise/Momentum/index.html deleted file mode 100644 index a43a5d27f..000000000 --- a/owl/Owl_regression/S/Optimise/Momentum/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Momentum (owl.Owl_regression.S.Optimise.Momentum)

Module Optimise.Momentum

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Momentum.typ =
  1. | Standard of float
  2. | Nesterov of float
  3. | None
val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Params/index.html b/owl/Owl_regression/S/Optimise/Params/index.html deleted file mode 100644 index 6957ca079..000000000 --- a/owl/Owl_regression/S/Optimise/Params/index.html +++ /dev/null @@ -1,16 +0,0 @@ - -Params (owl.Owl_regression.S.Optimise.Params)

Module Optimise.Params

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Params.typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}
val default : unit -> typ
val config : ?batch:Batch.typ -> ?gradient:Gradient.typ -> ?loss:Loss.typ -> ?learning_rate:Learning_Rate.typ -> ?regularisation:Regularisation.typ -> ?momentum:Momentum.typ -> ?clipping:Clipping.typ -> ?stopping:Stopping.typ -> ?checkpoint:Checkpoint.typ -> ?verbosity:bool -> float -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Regularisation/index.html b/owl/Owl_regression/S/Optimise/Regularisation/index.html deleted file mode 100644 index d34734447..000000000 --- a/owl/Owl_regression/S/Optimise/Regularisation/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Regularisation (owl.Owl_regression.S.Optimise.Regularisation)

Module Optimise.Regularisation

type typ = Owl_optimise_generic.Make(Owl_algodiff_generic.Make(Owl_algodiff_primal_ops.S)).Regularisation.typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None
val run : typ -> Algodiff.t -> Algodiff.t
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Stopping/index.html b/owl/Owl_regression/S/Optimise/Stopping/index.html deleted file mode 100644 index 85575dd5f..000000000 --- a/owl/Owl_regression/S/Optimise/Stopping/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -Stopping (owl.Owl_regression.S.Optimise.Stopping)

Module Optimise.Stopping

val run : typ -> float -> bool
val default : typ -> typ
val to_string : typ -> string
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/Utils/index.html b/owl/Owl_regression/S/Optimise/Utils/index.html deleted file mode 100644 index ad8d58a00..000000000 --- a/owl/Owl_regression/S/Optimise/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl.Owl_regression.S.Optimise.Utils)

Module Optimise.Utils

val sample_num : Algodiff.t -> int
val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t
val get_chunk : Algodiff.t -> Algodiff.t -> int -> int -> Algodiff.t * Algodiff.t
\ No newline at end of file diff --git a/owl/Owl_regression/S/Optimise/index.html b/owl/Owl_regression/S/Optimise/index.html deleted file mode 100644 index 68dbfe431..000000000 --- a/owl/Owl_regression/S/Optimise/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -Optimise (owl.Owl_regression.S.Optimise)

Module S.Optimise

module Algodiff : sig ... end
module Utils : sig ... end
module Learning_Rate : sig ... end
module Batch : sig ... end
module Loss : sig ... end
module Gradient : sig ... end
module Momentum : sig ... end
module Regularisation : sig ... end
module Clipping : sig ... end
module Stopping : sig ... end
module Checkpoint : sig ... end
module Params : sig ... end
val minimise_weight : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> (Algodiff.t array array -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
val minimise_fun : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Checkpoint.state * Algodiff.t
val minimise_compiled_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> (unit -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state
\ No newline at end of file diff --git a/owl/Owl_regression/S/index.html b/owl/Owl_regression/S/index.html deleted file mode 100644 index 052526ecb..000000000 --- a/owl/Owl_regression/S/index.html +++ /dev/null @@ -1,44 +0,0 @@ - -S (owl.Owl_regression.S)

Module Owl_regression.S

module Optimise : sig ... end
val ridge : ?i:bool -> ?alpha:float -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr array
val lasso : ?i:bool -> ?alpha:float -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr array
val elastic_net : ?i:bool -> ?alpha:float -> ?l1_ratio:float -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr array
val svm : ?i:bool -> ?a:float -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr array
val logistic : ?i:bool -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr -> Optimise.Algodiff.A.arr array
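All five fitting functions take the design matrix and the targets as plain dense arrays (the Optimise.Algodiff.A.arr type) and return the fitted parameter arrays. A minimal sketch, assuming the single-precision arrays of Owl.Dense.Ndarray.S match that arr type and using synthetic data; the meanings of ?i (intercept) and ?alpha (penalty strength) are assumptions:

module N = Owl.Dense.Ndarray.S

(* 100 samples, 3 features; targets generated from a fixed weight vector *)
let x = N.uniform [| 100; 3 |]
let w = N.of_array [| 2.; -1.; 0.5 |] [| 3; 1 |]
let y = N.dot x w

(* fit with an intercept and a small l2 penalty *)
let coeffs = Owl_regression.S.ridge ~i:true ~alpha:0.001 x y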
\ No newline at end of file diff --git a/owl/Owl_regression_generic/.dummy b/owl/Owl_regression_generic/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/A/Linalg/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/A/Linalg/index.html deleted file mode 100644 index b9fa61052..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_regression_generic.Make.Optimise.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> arr -> arr -> arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/A/Mat/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/A/Mat/index.html deleted file mode 100644 index 1f2458cf2..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_regression_generic.Make.Optimise.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/A/Scalar/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/A/Scalar/index.html deleted file mode 100644 index f70e897a9..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl.Owl_regression_generic.Make.Optimise.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/A/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/A/index.html deleted file mode 100644 index c7edfb82e..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl.Owl_regression_generic.Make.Optimise.Algodiff.A)

Module Algodiff.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(elt -> string) -> - arr -> - unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val dilated_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val transpose_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val max_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv1d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Arr/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Arr/index.html deleted file mode 100644 index ee861dd07..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Arr/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Arr (owl.Owl_regression_generic.Make.Optimise.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/index.html deleted file mode 100644 index ee55d191d..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Builder (owl.Owl_regression_generic.Make.Optimise.Algodiff.Builder)

Module Algodiff.Builder

Ops Builder
module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t

build single input single output operations

module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t

build single input pair outputs operations

module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t

build single input triple outputs operations

module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array

build single input array output operations

module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t

build pair inputs single output operations

module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t

build array input single output operations

\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Aiso/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Aiso/index.html deleted file mode 100644 index 66230f748..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Aiso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Aiso (owl.Owl_regression_generic.Make.Optimise.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Piso/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Piso/index.html deleted file mode 100644 index 9396c5f44..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Piso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Piso (owl.Owl_regression_generic.Make.Optimise.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Siao/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Siao/index.html deleted file mode 100644 index 2aab12248..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Siao/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siao (owl.Owl_regression_generic.Make.Optimise.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Sipo/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Sipo/index.html deleted file mode 100644 index 2f6be97ba..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Sipo/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sipo (owl.Owl_regression_generic.Make.Optimise.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : - t -> - t -> - (t Stdlib.ref * t Stdlib.ref) -> - (t Stdlib.ref * t Stdlib.ref) -> - t
\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Siso/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Siso/index.html deleted file mode 100644 index acbeb5392..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Siso/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Siso (owl.Owl_regression_generic.Make.Optimise.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Sito/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Sito/index.html deleted file mode 100644 index b7b79d75c..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Builder/module-type-Sito/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Sito (owl.Owl_regression_generic.Make.Optimise.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : - t -> - t -> - (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> - (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> - t
\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Linalg/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Linalg/index.html deleted file mode 100644 index 1f344d85f..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_regression_generic.Make.Optimise.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val logdet : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val chol : ?upper:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val qr : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val lq : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val svd : ?thin:bool -> t -> t * t * t

Refer to :doc:`owl_dense_ndarray_generic`

val sylvester : t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val lyapunov : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val discrete_lyapunov : - ?solver:[ `default | `bilinear | `direct ] -> - t -> - t -> - t

Refer to :doc:`owl_dense_ndarray_generic`

val (/@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val care : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dare : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Mat/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Mat/index.html deleted file mode 100644 index 5830e6a5d..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_regression_generic.Make.Optimise.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Maths/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Maths/index.html deleted file mode 100644 index 7c806e5a0..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/Maths/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Maths (owl.Owl_regression_generic.Make.Optimise.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (-) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (**) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val add : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sub : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mul : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val div : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val kron : t -> t -> t

Refer to :doc:`owl_dense_matrix_generic`

val dot : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pow : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val min2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cross_entropy : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val neg : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val abs : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val signum : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val floor : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val ceil : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val round : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqr : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqrt : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log2 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log10 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val exp : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum_reduce : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mean : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val swap : int -> int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l1norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm_sqr' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sigmoid : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val relu : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dawsn : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softplus : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softsign : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softmax : ?axis:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val reshape : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val flatten : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_item : t -> int -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_row : t -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val concat : axis:int -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val split : axis:int -> int array -> t -> t array

Refer to :doc:`owl_dense_ndarray_generic`

val of_arrays : t array array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val to_arrays : t -> t array array

Refer to :doc:`owl_dense_ndarray_generic`

val concatenate : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val stack : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_slice : int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_slice : int list list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_fancy : Owl_types.index list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_fancy : Owl_types.index list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diag : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diagm : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val trace : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val triu : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tril : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/NN/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/NN/index.html deleted file mode 100644 index b3b2eab41..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/NN/index.html +++ /dev/null @@ -1,20 +0,0 @@ - -NN (owl.Owl_regression_generic.Make.Optimise.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv1d : - ?padding:Owl_types.padding -> - t -> - t -> - int array -> - int array -> - t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv2d : - ?padding:Owl_types.padding -> - t -> - t -> - int array -> - int array -> - t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv3d : - ?padding:Owl_types.padding -> - t -> - t -> - int array -> - int array -> - t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val upsampling2d : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pad : ?v:A.elt -> int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/index.html deleted file mode 100644 index 54ede71a7..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Algodiff/index.html +++ /dev/null @@ -1,11 +0,0 @@ - -Algodiff (owl.Owl_regression_generic.Make.Optimise.Algodiff)

Module Optimise.Algodiff

include Owl_algodiff_core_sig.Sig
Type definition
include Owl_algodiff_types_sig.Sig with type elt := A.elt and type arr := A.arr
type t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
Core functions
val tag : unit -> int

TODO

val primal : t -> t

TODO

val primal' : t -> t

TODO

val zero : t -> t

TODO

val reset_zero : t -> t

TODO

val tangent : t -> t

TODO

val adjref : t -> t Stdlib.ref

TODO

val adjval : t -> t

TODO

val shape : t -> int array

TODO

val is_float : t -> bool

TODO

val is_arr : t -> bool

TODO

val row_num : t -> int

number of rows

val col_num : t -> int

number of columns

val numel : t -> int

number of elements

val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t

other functions, without tracking gradient

val clip_by_l2norm : A.elt -> t -> t

other functions, without tracking gradient

val copy_primal' : t -> t

TODO

val tile : t -> int array -> t

TODO

val repeat : t -> int array -> t

TODO

val pack_elt : A.elt -> t

convert from elt type to t type.

val unpack_elt : t -> A.elt

convert from t type to elt type.

val pack_flt : float -> t

convert from float type to t type.

val _f : float -> t

A shortcut function for F A.(float_to_elt x).

val unpack_flt : t -> float

convert from t type to float type.

val pack_arr : A.arr -> t

convert from arr type to t type.

val unpack_arr : t -> A.arr

convert from t type to arr type.

val deep_info : t -> string

TODO

val type_info : t -> string

TODO

val error_binop : string -> t -> t -> 'a

TODO

val error_uniop : string -> t -> 'a

TODO

val make_forward : t -> t -> int -> t

TODO

val make_reverse : t -> int -> t

TODO

val reverse_prop : t -> t -> unit

TODO

val diff : (t -> t) -> t -> t

diff f x returns the exact derivative of a function f : scalar -> scalar at point x. Simply calling diff f will return its derivative function g of the same type, i.e. g : scalar -> scalar.

Calling this function repeatedly gives higher-order derivatives of f, i.e. f |> diff |> diff |> diff |> ...
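
As an illustration only (not part of this signature), a minimal sketch using the concrete double-precision instance, assuming it is available as Owl.Algodiff.D:

module AD = Owl.Algodiff.D

(* a scalar -> scalar function and its first two derivatives *)
let f x = AD.Maths.(sin x * cos x)
let f' = AD.diff f
let f'' = AD.(f |> diff |> diff)
let _ = AD.(unpack_flt (f' (pack_flt 1.0)))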

val diff' : (t -> t) -> t -> t * t

similar to diff, but return (f x, diff f x).

val grad : (t -> t) -> t -> t

gradient of f : (vector -> scalar) at x, reverse ad.
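
A hedged sketch of a reverse-mode gradient (again assuming the concrete instance Owl.Algodiff.D; the function and input below are illustrative):

module AD = Owl.Algodiff.D

let g x = AD.Maths.(l2norm_sqr' x)   (* vector -> scalar *)
let x0 = AD.Arr.uniform [| 1; 5 |]   (* a 1 x 5 row vector *)
let gx = AD.grad g x0                (* gradient, same shape as x0 *)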

val grad' : (t -> t) -> t -> t * t

similar to grad, but return (f x, grad f x).

val jacobian : (t -> t) -> t -> t

jacobian of f : (vector -> vector) at x, both x and y are row vectors.

val jacobian' : (t -> t) -> t -> t * t

similar to jacobian, but return (f x, jacobian f x)

val jacobianv : (t -> t) -> t -> t -> t

jacobian vector product of f : (vector -> vector) at x along v, forward ad. Namely, it calculates (jacobian f x) v.
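
For example (a sketch under the same Owl.Algodiff.D assumption; f, x and v are illustrative):

module AD = Owl.Algodiff.D

let f x = AD.Maths.(x * x)      (* element-wise square: vector -> vector *)
let x = AD.Arr.uniform [| 1; 3 |]
let v = AD.Arr.ones [| 1; 3 |]
let jv = AD.jacobianv f x v     (* forward-mode Jacobian-vector product *)
let jtv = AD.jacobianTv f x v   (* reverse-mode transposed counterpart *)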

val jacobianv' : (t -> t) -> t -> t -> t * t

similar to jacobianv, but return (f x, jacobianv f x v)

val jacobianTv : (t -> t) -> t -> t -> t

transposed jacobian vector product of f : (vector -> vector) at x along v, backward ad. Namely, it calculates transpose ((jacobianv f x v)).

val jacobianTv' : (t -> t) -> t -> t -> t * t

similar to jacobianTv, but return (f x, transpose (jacobianv f x v))

val hessian : (t -> t) -> t -> t

hessian of f : (scalar -> scalar) at x.

val hessian' : (t -> t) -> t -> t * t

similar to hessian, but return (f x, hessian f x)

val hessianv : (t -> t) -> t -> t -> t

hessian vector product of f : (scalar -> scalar) at x along v. Namely, it calculates (hessian f x) v.

val hessianv' : (t -> t) -> t -> t -> t * t

similar to hessianv, but return (f x, hessianv f x v).

val laplacian : (t -> t) -> t -> t

laplacian of f : (scalar -> scalar) at x.

val laplacian' : (t -> t) -> t -> t * t

similar to laplacian, but return (f x, laplacian f x).

val gradhessian : (t -> t) -> t -> t * t

return (grad f x, hessian f x), f : (scalar -> scalar)

val gradhessian' : (t -> t) -> t -> t * t * t

return (f x, grad f x, hessian f x)

val gradhessianv : (t -> t) -> t -> t -> t * t

return (grad f x v, hessian f x v)

val gradhessianv' : (t -> t) -> t -> t -> t * t * t

return (f x, grad f x v, hessian f x v)

include Owl_algodiff_ops_sig.Sig - with type t := t - and type elt := A.elt - and type arr := A.arr - and type op := op
module Builder : - Owl_algodiff_ops_builder_sig.Sig - with type t := t - and type elt := A.elt - and type arr := A.arr - and type op := op
Supported Maths functions
module Maths : sig ... end
Supported Linalg functions
module Linalg : sig ... end
Supported Neural Network functions
module NN : sig ... end
Supported Mat functions
module Mat : sig ... end
Supported Arr functions
module Arr : sig ... end
Helper functions
include Owl_algodiff_graph_convert_sig.Sig with type t := t
val to_trace : t list -> string

to_trace [t0; t1; ...] outputs the trace of the computation graph on the terminal in a human-readable format.

val to_dot : t list -> string

to_dot [t0; t1; ...] outputs the trace of the computation graph in the dot file format, which you can feed to other tools for further visualisation, such as Graphviz.
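
A hedged sketch of dumping a graph (assuming Owl.Algodiff.D as the concrete instance; the computation is illustrative):

module AD = Owl.Algodiff.D

let x = AD.make_reverse (AD.Arr.uniform [| 1; 3 |]) (AD.tag ())
let y = AD.Maths.(l2norm_sqr' x)
let () = print_endline (AD.to_dot [ y ])   (* paste the output into Graphviz *)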

val pp_num : Stdlib.Format.formatter -> t -> unit

pp_num t pretty prints the abstract number used in Algodiff.

\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Batch/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Batch/index.html deleted file mode 100644 index e35a5ecef..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Batch/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Batch (owl.Owl_regression_generic.Make.Optimise.Batch)

Module Optimise.Batch

Batch module

type typ =
  1. | Full
  2. | Mini of int
  3. | Sample of int
  4. | Stochastic

Types of batches.

val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t

Execute the computations defined in module typ.

val batches : typ -> Algodiff.t -> int

Return the total number of batches given a batch typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Checkpoint/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Checkpoint/index.html deleted file mode 100644 index e4b64d9d0..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Checkpoint/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Checkpoint (owl.Owl_regression_generic.Make.Optimise.Checkpoint)

Module Optimise.Checkpoint

Checkpoint module

type state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}

Type definition of checkpoint

type typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of state -> unit
  4. | None

Batch type.

val init_state : int -> float -> state

init_state batches_per_epoch epochs initialises a state by specifying the number of batches per epoch and the number of epochs in total.
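
For instance (a sketch assuming the double-precision instance is available as Owl.Optimise.D; adjust the module path to your concrete instance):

open Owl.Optimise.D

let state = Checkpoint.init_state 100 10.   (* 100 batches per epoch, 10 epochs *)
let () = Checkpoint.print_summary state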

val default_checkpoint_fun : (string -> 'a) -> 'a

This function is used for saving intermediate files during optimisation.

val print_state_info : state -> unit

Print out the detailed information of the current state.

val print_summary : state -> unit

Print out the summary of the current state.

val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Clipping/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Clipping/index.html deleted file mode 100644 index 28589931a..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Clipping/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Clipping (owl.Owl_regression_generic.Make.Optimise.Clipping)

Module Optimise.Clipping

Clipping module

type typ =
  1. | L2norm of float
  2. | Value of float * float
  3. | None

Types of clipping functions.

val run : typ -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Gradient/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Gradient/index.html deleted file mode 100644 index 2f07ec616..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Gradient/index.html +++ /dev/null @@ -1,9 +0,0 @@ - -Gradient (owl.Owl_regression_generic.Make.Optimise.Gradient)

Module Optimise.Gradient

Gradient module

type typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton

Types of gradient function.

val run : - typ -> - (Algodiff.t -> Algodiff.t) -> - Algodiff.t -> - Algodiff.t -> - Algodiff.t -> - Algodiff.t -> - Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Learning_Rate/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Learning_Rate/index.html deleted file mode 100644 index bb95638b1..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Learning_Rate/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Learning_Rate (owl.Owl_regression_generic.Make.Optimise.Learning_Rate)

Module Optimise.Learning_Rate

Strategies for learning rate update

type typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array

Representation of learning rate update strategies. Possible values include:

  • Adam (alpha, beta1, beta2), see ref for parameter meaning
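
For example, Adam with the commonly used values alpha = 0.001, beta1 = 0.9 and beta2 = 0.999 (values are illustrative; the sketch assumes a concrete instance such as Owl.Optimise.D):

open Owl.Optimise.D

let lr = Learning_Rate.Adam (0.001, 0.9, 0.999)
let () = print_endline (Learning_Rate.to_string lr)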
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array

Update the cache of gradients.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Loss/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Loss/index.html deleted file mode 100644 index a17e2ea68..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Loss/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Loss (owl.Owl_regression_generic.Make.Optimise.Loss)

Module Optimise.Loss

Loss module

type typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of Algodiff.t -> Algodiff.t -> Algodiff.t

Types of loss functions.
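
Besides the built-in variants, Custom wraps any Algodiff function of two arguments. A minimal sketch (assuming a concrete instance such as Owl.Optimise.D):

open Owl.Optimise.D

(* a mean-squared-error style loss written directly against Algodiff *)
let my_loss = Loss.Custom (fun y y' -> Algodiff.Maths.(l2norm_sqr' (y - y')))
let () = print_endline (Loss.to_string my_loss)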

val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Momentum/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Momentum/index.html deleted file mode 100644 index a23b5b2cf..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Momentum/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Momentum (owl.Owl_regression_generic.Make.Optimise.Momentum)

Module Optimise.Momentum

Momentum module

type typ =
  1. | Standard of float
  2. | Nesterov of float
  3. | None

Types of momentum functions.

val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Params/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Params/index.html deleted file mode 100644 index 2145eab06..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Params/index.html +++ /dev/null @@ -1,14 +0,0 @@ - -Params (owl.Owl_regression_generic.Make.Optimise.Params)

Module Optimise.Params

Params module

type typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}

Type definition of parameter.

val default : unit -> typ

Create module typ with default values.

val config : - ?batch:Batch.typ -> - ?gradient:Gradient.typ -> - ?loss:Loss.typ -> - ?learning_rate:Learning_Rate.typ -> - ?regularisation:Regularisation.typ -> - ?momentum:Momentum.typ -> - ?clipping:Clipping.typ -> - ?stopping:Stopping.typ -> - ?checkpoint:Checkpoint.typ -> - ?verbosity:bool -> - float -> - typ

This function creates a parameter record from the given configuration; unspecified fields take their default values, and the final float argument sets the total number of epochs.
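
A hedged sketch (assuming the double-precision instance Owl.Optimise.D; the hyperparameter values are illustrative):

open Owl.Optimise.D

let params =
  Params.config
    ~batch:(Batch.Mini 100)
    ~learning_rate:(Learning_Rate.Adagrad 0.005)
    ~loss:Loss.Quadratic
    ~stopping:(Stopping.Const 1e-6)
    10.                                      (* train for 10 epochs *)

let () = print_endline (Params.to_string params)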

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Regularisation/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Regularisation/index.html deleted file mode 100644 index e590d299d..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Regularisation/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Regularisation (owl.Owl_regression_generic.Make.Optimise.Regularisation)

Module Optimise.Regularisation

Regularisation module

type typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None

Types of regularisation functions.

val run : typ -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Stopping/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Stopping/index.html deleted file mode 100644 index e8d847616..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Stopping/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Stopping (owl.Owl_regression_generic.Make.Optimise.Stopping)

Module Optimise.Stopping

Stopping module

type typ =
  1. | Const of float
  2. | Early of int * int
  3. | None

Types of stopping functions.

val run : typ -> float -> bool

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/Utils/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/Utils/index.html deleted file mode 100644 index f15de8245..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/Utils/index.html +++ /dev/null @@ -1,7 +0,0 @@ - -Utils (owl.Owl_regression_generic.Make.Optimise.Utils)

Module Optimise.Utils

Utils module

val sample_num : Algodiff.t -> int

Return the total number of samples in the passed-in ndarray.

val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t

draw_samples x y draws samples from both x (observations) and y (labels). The samples will be drawn along axis 0, so x and y must agree along axis 0.

val get_chunk : - Algodiff.t -> - Algodiff.t -> - int -> - int -> - Algodiff.t * Algodiff.t

get_chunk x y i c gets a contiguous chunk of c samples starting at position i from x (observations) and y (labels).

\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/argument-1-Optimise/index.html b/owl/Owl_regression_generic/Make/argument-1-Optimise/index.html deleted file mode 100644 index 49f7b00d0..000000000 --- a/owl/Owl_regression_generic/Make/argument-1-Optimise/index.html +++ /dev/null @@ -1,31 +0,0 @@ - -Optimise (owl.Owl_regression_generic.Make.Optimise)

Parameter Make.Optimise

module Utils : sig ... end

Utils module

module Learning_Rate : sig ... end

Strategies for learning rate update

module Batch : sig ... end

Batch module

module Loss : sig ... end

Loss module

module Gradient : sig ... end

Gradient module

module Momentum : sig ... end

Momentum module

module Regularisation : sig ... end

Regularisation module

module Clipping : sig ... end

Clipping module

module Stopping : sig ... end

Stopping module

module Checkpoint : sig ... end

Checkpoint module

module Params : sig ... end

Params module

Core functions
val minimise_weight : - ?state:Checkpoint.state -> - Params.typ -> - (Algodiff.t -> Algodiff.t -> Algodiff.t) -> - Algodiff.t -> - Algodiff.t -> - Algodiff.t -> - Checkpoint.state * Algodiff.t

This function minimises the weight w of the passed-in function f.

f is a function f : w -> x -> y; w is a row vector, but y can have any shape.

val minimise_network : - ?state:Checkpoint.state -> - Params.typ -> - (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> - (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> - (Algodiff.t array array -> unit) -> - (string -> unit) -> - Algodiff.t -> - Algodiff.t -> - Checkpoint.state

This function is specifically designed for minimising the weights in a neural network of graph structure. In Owl's earlier versions, the functions in the regression module were actually implemented using this function.

val minimise_fun : - ?state:Checkpoint.state -> - Params.typ -> - (Algodiff.t -> Algodiff.t) -> - Algodiff.t -> - Checkpoint.state * Algodiff.t

This function minimises f : x -> y w.r.t. x.

x is an ndarray, and y is a scalar value.
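
A minimal sketch (assuming Owl.Optimise.D as the concrete instance; the objective below is illustrative):

open Owl.Optimise.D

(* minimise the squared L2 norm, whose minimum is the zero vector *)
let f x = Algodiff.Maths.(l2norm_sqr' x)
let x0 = Algodiff.Arr.ones [| 1; 5 |]
let params = Params.config ~learning_rate:(Learning_Rate.Const 0.1) 50.
let _state, x_min = minimise_fun params f x0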

val minimise_compiled_network : - ?state:Checkpoint.state -> - Params.typ -> - (Algodiff.t -> Algodiff.t -> Algodiff.t) -> - (unit -> unit) -> - (string -> unit) -> - Algodiff.t -> - Algodiff.t -> - Checkpoint.state

TODO

\ No newline at end of file diff --git a/owl/Owl_regression_generic/Make/index.html b/owl/Owl_regression_generic/Make/index.html deleted file mode 100644 index 63bf8fffc..000000000 --- a/owl/Owl_regression_generic/Make/index.html +++ /dev/null @@ -1,44 +0,0 @@ - -Make (owl.Owl_regression_generic.Make)

Module Owl_regression_generic.Make

Parameters

Signature

module Optimise = Optimise
val ridge : - ?i:bool -> - ?alpha:float -> - Optimise.Algodiff.A.arr -> - Optimise.Algodiff.A.arr -> - Optimise.Algodiff.A.arr array
val lasso : - ?i:bool -> - ?alpha:float -> - Optimise.Algodiff.A.arr -> - Optimise.Algodiff.A.arr -> - Optimise.Algodiff.A.arr array
val elastic_net : - ?i:bool -> - ?alpha:float -> - ?l1_ratio:float -> - Optimise.Algodiff.A.arr -> - Optimise.Algodiff.A.arr -> - Optimise.Algodiff.A.arr array
val svm : - ?i:bool -> - ?a:float -> - Optimise.Algodiff.A.arr -> - Optimise.Algodiff.A.arr -> - Optimise.Algodiff.A.arr array
val logistic : - ?i:bool -> - Optimise.Algodiff.A.arr -> - Optimise.Algodiff.A.arr -> - Optimise.Algodiff.A.arr array
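
As a usage illustration only (this page lists the functor's signature; the sketch below assumes the double-precision instance Owl.Regression.D and Owl.Mat, with made-up data):

let x = Owl.Mat.uniform 100 3                        (* 100 samples, 3 features *)
let w = Owl.Mat.of_array [| 1.; 2.; 3. |] 3 1
let y = Owl.Mat.(x *@ w)
let coef = Owl.Regression.D.ridge ~alpha:0.001 x y   (* array of fitted weight matrices *)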
\ No newline at end of file diff --git a/owl/Owl_regression_generic_sig/.dummy b/owl/Owl_regression_generic_sig/.dummy deleted file mode 100644 index e69de29bb..000000000 diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/A/Linalg/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/A/Linalg/index.html deleted file mode 100644 index 98183b35b..000000000 --- a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/A/Linalg/index.html +++ /dev/null @@ -1,6 +0,0 @@ - -Linalg (owl.Owl_regression_generic_sig.Sig.Optimise.Algodiff.A.Linalg)

Module A.Linalg

val inv : arr -> arr
val logdet : arr -> elt
val chol : ?upper:bool -> arr -> arr
val svd : ?thin:bool -> arr -> arr * arr * arr
val qr : arr -> arr * arr
val lq : arr -> arr * arr
val sylvester : arr -> arr -> arr -> arr
val lyapunov : arr -> arr -> arr
val discrete_lyapunov : - ?solver:[ `default | `bilinear | `direct ] -> - arr -> - arr -> - arr
val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> arr -> arr -> arr
val care : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
val dare : ?diag_r:bool -> arr -> arr -> arr -> arr -> arr
\ No newline at end of file diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/A/Mat/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/A/Mat/index.html deleted file mode 100644 index 32555c128..000000000 --- a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/A/Mat/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Mat (owl.Owl_regression_generic_sig.Sig.Optimise.Algodiff.A.Mat)

Module A.Mat

val diagm : ?k:int -> arr -> arr
val triu : ?k:int -> arr -> arr
val tril : ?k:int -> arr -> arr
val eye : int -> arr
\ No newline at end of file diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/A/Scalar/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/A/Scalar/index.html deleted file mode 100644 index 090dd08f6..000000000 --- a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/A/Scalar/index.html +++ /dev/null @@ -1,2 +0,0 @@ - -Scalar (owl.Owl_regression_generic_sig.Sig.Optimise.Algodiff.A.Scalar)

Module A.Scalar

val add : elt -> elt -> elt
val sub : elt -> elt -> elt
val mul : elt -> elt -> elt
val div : elt -> elt -> elt
val pow : elt -> elt -> elt
val atan2 : elt -> elt -> elt
val abs : elt -> elt
val neg : elt -> elt
val sqr : elt -> elt
val sqrt : elt -> elt
val exp : elt -> elt
val log : elt -> elt
val log2 : elt -> elt
val log10 : elt -> elt
val signum : elt -> elt
val floor : elt -> elt
val ceil : elt -> elt
val round : elt -> elt
val sin : elt -> elt
val cos : elt -> elt
val tan : elt -> elt
val sinh : elt -> elt
val cosh : elt -> elt
val tanh : elt -> elt
val asin : elt -> elt
val acos : elt -> elt
val atan : elt -> elt
val asinh : elt -> elt
val acosh : elt -> elt
val atanh : elt -> elt
val relu : elt -> elt
val dawsn : elt -> elt
val sigmoid : elt -> elt
\ No newline at end of file diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/A/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/A/index.html deleted file mode 100644 index f89571685..000000000 --- a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/A/index.html +++ /dev/null @@ -1,158 +0,0 @@ - -A (owl.Owl_regression_generic_sig.Sig.Optimise.Algodiff.A)

Module Algodiff.A

include Owl_types_ndarray_eltcmp.Sig
include Owl_types_ndarray_basic.Sig
type arr
type elt
val empty : int array -> arr
val zeros : int array -> arr
val ones : int array -> arr
val create : int array -> elt -> arr
val sequential : ?a:elt -> ?step:elt -> int array -> arr
val uniform : ?a:elt -> ?b:elt -> int array -> arr
val gaussian : ?mu:elt -> ?sigma:elt -> int array -> arr
val bernoulli : ?p:elt -> int array -> arr
val init : int array -> (int -> elt) -> arr
val init_nd : int array -> (int array -> elt) -> arr
val shape : arr -> int array
val numel : arr -> int
val get : arr -> int array -> elt
val set : arr -> int array -> elt -> unit
val get_slice : int list list -> arr -> arr
val set_slice : int list list -> arr -> arr -> unit
val get_fancy : Owl_types_common.index list -> arr -> arr
val set_fancy : Owl_types_common.index list -> arr -> arr -> unit
val copy : arr -> arr
val copy_ : out:arr -> arr -> unit
val reset : arr -> unit
val reshape : arr -> int array -> arr
val reverse : arr -> arr
val tile : arr -> int array -> arr
val repeat : arr -> int array -> arr
val concatenate : ?axis:int -> arr array -> arr
val stack : ?axis:int -> arr array -> arr
val split : ?axis:int -> int array -> arr -> arr array
val expand : ?hi:bool -> arr -> int -> arr
val squeeze : ?axis:int array -> arr -> arr
val draw : ?axis:int -> arr -> int -> arr * int array
val map : (elt -> elt) -> arr -> arr
val fold : ?axis:int -> (elt -> elt -> elt) -> elt -> arr -> arr
val scan : ?axis:int -> (elt -> elt -> elt) -> arr -> arr
val one_hot : int -> arr -> arr
val pad : ?v:elt -> int list list -> arr -> arr
val print : - ?max_row:int -> - ?max_col:int -> - ?header:bool -> - ?fmt:(elt -> string) -> - arr -> - unit
val abs : arr -> arr
val neg : arr -> arr
val floor : arr -> arr
val ceil : arr -> arr
val round : arr -> arr
val sqr : arr -> arr
val sqrt : arr -> arr
val log : arr -> arr
val log2 : arr -> arr
val log10 : arr -> arr
val exp : arr -> arr
val sin : arr -> arr
val cos : arr -> arr
val tan : arr -> arr
val sinh : arr -> arr
val cosh : arr -> arr
val tanh : arr -> arr
val asin : arr -> arr
val acos : arr -> arr
val atan : arr -> arr
val asinh : arr -> arr
val acosh : arr -> arr
val atanh : arr -> arr
val min : ?axis:int -> ?keep_dims:bool -> arr -> arr
val max : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum : ?axis:int -> ?keep_dims:bool -> arr -> arr
val sum_reduce : ?axis:int array -> arr -> arr
val signum : arr -> arr
val sigmoid : arr -> arr
val relu : arr -> arr
val dawsn : arr -> arr
val min' : arr -> elt
val max' : arr -> elt
val sum' : arr -> elt
val log_sum_exp' : arr -> elt
val log_sum_exp : ?axis:int -> ?keep_dims:bool -> arr -> arr
val l1norm' : arr -> elt
val l2norm' : arr -> elt
val l2norm_sqr' : arr -> elt
val clip_by_value : ?amin:elt -> ?amax:elt -> arr -> arr
val clip_by_l2norm : elt -> arr -> arr
val pow : arr -> arr -> arr
val scalar_pow : elt -> arr -> arr
val pow_scalar : arr -> elt -> arr
val atan2 : arr -> arr -> arr
val scalar_atan2 : elt -> arr -> arr
val atan2_scalar : arr -> elt -> arr
val add : arr -> arr -> arr
val sub : arr -> arr -> arr
val mul : arr -> arr -> arr
val div : arr -> arr -> arr
val add_scalar : arr -> elt -> arr
val sub_scalar : arr -> elt -> arr
val mul_scalar : arr -> elt -> arr
val div_scalar : arr -> elt -> arr
val scalar_add : elt -> arr -> arr
val scalar_sub : elt -> arr -> arr
val scalar_mul : elt -> arr -> arr
val scalar_div : elt -> arr -> arr
val fma : arr -> arr -> arr -> arr
val conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val dilated_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val dilated_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - int array -> - arr
val transpose_conv1d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv2d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val transpose_conv3d : - ?padding:Owl_types_common.padding -> - arr -> - arr -> - int array -> - arr
val max_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val max_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool1d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool2d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val avg_pool3d : - ?padding:Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr
val upsampling2d : arr -> int array -> arr
val conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val dilated_conv1d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv1d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv2d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_input : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val dilated_conv3d_backward_kernel : - arr -> - arr -> - int array -> - int array -> - arr -> - arr
val transpose_conv1d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv1d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv2d_backward_kernel : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_input : arr -> arr -> int array -> arr -> arr
val transpose_conv3d_backward_kernel : arr -> arr -> int array -> arr -> arr
val max_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val max_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool1d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool2d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val avg_pool3d_backward : - Owl_types_common.padding -> - arr -> - int array -> - int array -> - arr -> - arr
val upsampling2d_backward : arr -> int array -> arr -> arr
val row_num : arr -> int
val col_num : arr -> int
val row : arr -> int -> arr
val rows : arr -> int array -> arr
val copy_row_to : arr -> arr -> int -> unit
val copy_col_to : arr -> arr -> int -> unit
val dot : arr -> arr -> arr
val diag : ?k:int -> arr -> arr
val trace : arr -> elt
val transpose : ?axis:int array -> arr -> arr
val to_rows : arr -> arr array
val of_rows : arr array -> arr
val to_cols : arr -> arr array
val of_cols : arr array -> arr
val of_array : elt array -> int array -> arr
val of_arrays : elt array array -> arr
val float_to_elt : float -> elt
val elt_to_float : elt -> float
val elt_equal : arr -> arr -> arr
val elt_not_equal : arr -> arr -> arr
val elt_less : arr -> arr -> arr
val elt_greater : arr -> arr -> arr
val elt_less_equal : arr -> arr -> arr
val elt_greater_equal : arr -> arr -> arr
val elt_equal_scalar : arr -> elt -> arr
val elt_not_equal_scalar : arr -> elt -> arr
val elt_less_scalar : arr -> elt -> arr
val elt_greater_scalar : arr -> elt -> arr
val elt_less_equal_scalar : arr -> elt -> arr
val elt_greater_equal_scalar : arr -> elt -> arr
module Scalar : sig ... end
module Mat : sig ... end
module Linalg : sig ... end
diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Arr/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Arr/index.html
deleted file mode 100644
Arr (owl.Owl_regression_generic_sig.Sig.Optimise.Algodiff.Arr)

Module Algodiff.Arr

val empty : int array -> t
val zeros : int array -> t
val ones : int array -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int array -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int array -> t
val shape : t -> int array
val numel : t -> int
val reset : t -> unit
val reshape : t -> int array -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/index.html
deleted file mode 100644
Builder (owl.Owl_regression_generic_sig.Sig.Optimise.Algodiff.Builder)

Module Algodiff.Builder

Ops Builder
module type Siso = sig ... end
val build_siso : (module Siso) -> t -> t

build single input single output operations

module type Sipo = sig ... end
val build_sipo : (module Sipo) -> t -> t * t

build single input pair outputs operations

module type Sito = sig ... end
val build_sito : (module Sito) -> t -> t * t * t

build single input triple outputs operations

module type Siao = sig ... end
val build_siao : (module Siao) -> t -> t array

build single input array output operations

module type Piso = sig ... end
val build_piso : (module Piso) -> t -> t -> t

build pair inputs single output operations

module type Aiso = sig ... end
val build_aiso : (module Aiso) -> t array -> t

build array input single output operations

diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Aiso/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Aiso/index.html
deleted file mode 100644
Aiso (owl.Owl_regression_generic_sig.Sig.Optimise.Algodiff.Builder.Aiso)

Module type Builder.Aiso

val label : string
val ff : t array -> t
val df : int list -> t -> t array -> t array -> t
val dr : int list -> t array -> t -> t Stdlib.ref -> t list
diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Piso/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Piso/index.html
deleted file mode 100644
Piso (owl.Owl_regression_generic_sig.Sig.Optimise.Algodiff.Builder.Piso)

Module type Builder.Piso

val label : string
val ff_aa : A.elt -> A.elt -> t
val ff_ab : A.elt -> A.arr -> t
val ff_ba : A.arr -> A.elt -> t
val ff_bb : A.arr -> A.arr -> t
val df_da : t -> t -> t -> t -> t
val df_db : t -> t -> t -> t -> t
val df_dab : t -> t -> t -> t -> t -> t
val dr_ab : t -> t -> t -> t Stdlib.ref -> t * t
val dr_a : t -> t -> t -> t Stdlib.ref -> t
val dr_b : t -> t -> t -> t Stdlib.ref -> t
diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Siao/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Siao/index.html
deleted file mode 100644
Siao (owl.Owl_regression_generic_sig.Sig.Optimise.Algodiff.Builder.Siao)

Module type Builder.Siao

val label : string
val ff_f : A.elt -> t array
val ff_arr : A.arr -> t array
val df : t array -> t -> t -> t array
val dr : t -> t -> t Stdlib.ref array -> t Stdlib.ref array -> t
diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Sipo/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Sipo/index.html
deleted file mode 100644
Sipo (owl.Owl_regression_generic_sig.Sig.Optimise.Algodiff.Builder.Sipo)

Module type Builder.Sipo

val label : string
val ff_f : A.elt -> t * t
val ff_arr : A.arr -> t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref) -> t
diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Siso/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Siso/index.html
deleted file mode 100644
Siso (owl.Owl_regression_generic_sig.Sig.Optimise.Algodiff.Builder.Siso)

Module type Builder.Siso

val label : string
val ff_f : A.elt -> t
val ff_arr : A.arr -> t
val df : t -> t -> t -> t
val dr : t -> t -> t Stdlib.ref -> t
diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Sito/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Builder/module-type-Sito/index.html
deleted file mode 100644
Sito (owl.Owl_regression_generic_sig.Sig.Optimise.Algodiff.Builder.Sito)

Module type Builder.Sito

val label : string
val ff_f : A.elt -> t * t * t
val ff_arr : A.arr -> t * t * t
val df : t -> t -> t -> t
val dr : t -> t -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> (t Stdlib.ref * t Stdlib.ref * t Stdlib.ref) -> t
diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Linalg/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Linalg/index.html
deleted file mode 100644
Linalg (owl.Owl_regression_generic_sig.Sig.Optimise.Algodiff.Linalg)

Module Algodiff.Linalg

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val logdet : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val chol : ?upper:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val qr : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val lq : t -> t * t

Refer to :doc:`owl_dense_ndarray_generic`

val svd : ?thin:bool -> t -> t * t * t

Refer to :doc:`owl_dense_ndarray_generic`

val sylvester : t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val lyapunov : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val discrete_lyapunov : ?solver:[ `default | `bilinear | `direct ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val linsolve : ?trans:bool -> ?typ:[ `n | `u | `l ] -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val care : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dare : ?diag_r:bool -> t -> t -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Mat/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Mat/index.html
deleted file mode 100644
Mat (owl.Owl_regression_generic_sig.Sig.Optimise.Algodiff.Mat)

Module Algodiff.Mat

val empty : int -> int -> t
val zeros : int -> int -> t
val eye : int -> t
val ones : int -> int -> t
val uniform : ?a:A.elt -> ?b:A.elt -> int -> int -> t
val gaussian : ?mu:A.elt -> ?sigma:A.elt -> int -> int -> t
val shape : t -> int * int
val numel : t -> int
val row_num : t -> int
val col_num : t -> int
val reset : t -> unit
val reshape : int -> int -> t -> t
val get : t -> int -> int -> t
val set : t -> int -> int -> t -> t
val row : t -> int -> t
val mean : t -> t
val add : t -> t -> t
val sub : t -> t -> t
val mul : t -> t -> t
val div : t -> t -> t
val dot : t -> t -> t
val map_by_row : (t -> t) -> t -> t
val of_arrays : A.elt array array -> t
val init_2d : int -> int -> (int -> int -> t) -> t
val print : t -> unit
diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Maths/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/Maths/index.html
deleted file mode 100644
Maths (owl.Owl_regression_generic_sig.Sig.Optimise.Algodiff.Maths)

Module Algodiff.Maths

val (+) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (-) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (/) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (*@) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val (**) : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val add : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sub : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mul : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val div : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val kron : t -> t -> t

Refer to :doc:`owl_dense_matrix_generic`

val dot : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pow : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val min2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max2 : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cross_entropy : t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val inv : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val neg : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val abs : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val signum : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val floor : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val ceil : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val round : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqr : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sqrt : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log2 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log10 : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val exp : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val cosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asin : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acos : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atan : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val asinh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val acosh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val atanh : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val log_sum_exp : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum : ?axis:int -> ?keep_dims:bool -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sum_reduce : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val mean : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose : ?axis:int array -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val swap : int -> int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l1norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val l2norm_sqr' : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val sigmoid : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val relu : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dawsn : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softplus : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softsign : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val softmax : ?axis:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val reshape : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val flatten : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_item : t -> int -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_row : t -> int -> t

Refer to :doc:`owl_dense_ndarray_generic`

val concat : axis:int -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val split : axis:int -> int array -> t -> t array

Refer to :doc:`owl_dense_ndarray_generic`

val of_arrays : t array array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val to_arrays : t -> t array array

Refer to :doc:`owl_dense_ndarray_generic`

val concatenate : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val stack : axis:int -> t array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_slice : int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_slice : int list list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val get_fancy : Owl_types.index list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val set_fancy : Owl_types.index list -> t -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diag : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val diagm : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val trace : t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val triu : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val tril : ?k:int -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/NN/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/NN/index.html
deleted file mode 100644
NN (owl.Owl_regression_generic_sig.Sig.Optimise.Algodiff.NN)

Module Algodiff.NN

val dropout : ?rate:float -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val dilated_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv1d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv2d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val transpose_conv3d : ?padding:Owl_types.padding -> t -> t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val max_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool1d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool2d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val avg_pool3d : Owl_types.padding -> t -> int array -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val upsampling2d : t -> int array -> t

Refer to :doc:`owl_dense_ndarray_generic`

val pad : ?v:A.elt -> int list list -> t -> t

Refer to :doc:`owl_dense_ndarray_generic`

diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Algodiff/index.html
deleted file mode 100644
Algodiff (owl.Owl_regression_generic_sig.Sig.Optimise.Algodiff)

Module Optimise.Algodiff

include Owl_algodiff_core_sig.Sig
Type definition
include Owl_algodiff_types_sig.Sig with type elt := A.elt and type arr := A.arr
type t =
  1. | F of A.elt
  2. | Arr of A.arr
  3. | DF of t * t * int
  4. | DR of t * t Stdlib.ref * op * int Stdlib.ref * int * int Stdlib.ref
and adjoint = t -> t Stdlib.ref -> (t * t) list -> (t * t) list
and register = t list -> t list
and label = string * t list
and op = adjoint * register * label
Core functions
val tag : unit -> int

TODO

val primal : t -> t

TODO

val primal' : t -> t

TODO

val zero : t -> t

TODO

val reset_zero : t -> t

TODO

val tangent : t -> t

TODO

val adjref : t -> t Stdlib.ref

TODO

val adjval : t -> t

TODO

val shape : t -> int array

TODO

val is_float : t -> bool

TODO

val is_arr : t -> bool

TODO

val row_num : t -> int

number of rows

val col_num : t -> int

number of columns

val numel : t -> int

number of elements

val clip_by_value : amin:A.elt -> amax:A.elt -> t -> t

other functions, without tracking gradient

val clip_by_l2norm : A.elt -> t -> t

other functions, without tracking gradient

val copy_primal' : t -> t

TODO

val tile : t -> int array -> t

TODO

val repeat : t -> int array -> t

TODO

val pack_elt : A.elt -> t

convert from elt type to t type.

val unpack_elt : t -> A.elt

convert from t type to elt type.

val pack_flt : float -> t

convert from float type to t type.

val _f : float -> t

A shortcut function for F A.(float_to_elt x).

val unpack_flt : t -> float

convert from t type to float type.

val pack_arr : A.arr -> t

convert from arr type to t type.

val unpack_arr : t -> A.arr

convert from t type to arr type.

val deep_info : t -> string

TODO

val type_info : t -> string

TODO

val error_binop : string -> t -> t -> 'a

TODO

val error_uniop : string -> t -> 'a

TODO

val make_forward : t -> t -> int -> t

TODO

val make_reverse : t -> int -> t

TODO

val reverse_prop : t -> t -> unit

TODO

val diff : (t -> t) -> t -> t

diff f x returns the exact derivative of a function f : scalar -> scalar at point x. Simply calling diff f will return its derivative function g of the same type, i.e. g : scalar -> scalar.

Calling this function repeatedly gives higher-order derivatives of f, i.e. f |> diff |> diff |> diff |> ...
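
For illustration, a minimal sketch of using diff, assuming the standard double-precision instantiation Owl.Algodiff.D of this interface (an assumption; any module satisfying this signature works the same way):

open Owl.Algodiff.D

let f x = Maths.(sin x * x)        (* f : scalar -> scalar *)
let f' = diff f                    (* first derivative *)
let f'' = f |> diff |> diff        (* second derivative *)

let () =
  let x = pack_flt 1.0 in
  Printf.printf "f'(1) = %g, f''(1) = %g\n" (unpack_flt (f' x)) (unpack_flt (f'' x))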

val diff' : (t -> t) -> t -> t * t

similar to diff, but return (f x, diff f x).

val grad : (t -> t) -> t -> t

gradient of f : (vector -> scalar) at x, reverse ad.
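
For illustration, a sketch assuming the Owl.Algodiff.D instantiation and Owl.Mat for constructing the input (both assumptions outside this signature):

open Owl.Algodiff.D

let f x = Maths.(l2norm_sqr' x)              (* f : vector -> scalar *)
let x = pack_arr (Owl.Mat.uniform 1 3)       (* a 1x3 row vector *)
let g = grad f x                             (* gradient of f at x, same shape as x *)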

val grad' : (t -> t) -> t -> t * t

similar to grad, but return (f x, grad f x).

val jacobian : (t -> t) -> t -> t

jacobian of f : (vector -> vector) at x, both x and y are row vectors.

val jacobian' : (t -> t) -> t -> t * t

similar to jacobian, but return (f x, jacobian f x)

val jacobianv : (t -> t) -> t -> t -> t

jacobian vector product of f : (vector -> vector) at x along v, forward ad. Namely, it calculates (jacobian f x) v
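
For illustration, a sketch assuming the Owl.Algodiff.D instantiation and Owl.Mat for the inputs (both assumptions outside this signature):

open Owl.Algodiff.D

let f x = Maths.(tanh x)                     (* f : vector -> vector, applied element-wise *)
let x = pack_arr (Owl.Mat.uniform 1 4)
let v = pack_arr (Owl.Mat.ones 1 4)
let jv = jacobianv f x v                     (* (jacobian f x) applied to v, forward mode *)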

val jacobianv' : (t -> t) -> t -> t -> t * t

similar to jacobianv, but return (f x, jacobianv f x v)

val jacobianTv : (t -> t) -> t -> t -> t

transposed jacobian vector product of f : (vector -> vector) at x along v, backward ad. Namely, it calculates (transpose (jacobian f x)) v.

val jacobianTv' : (t -> t) -> t -> t -> t * t

similar to jacobianTv, but return (f x, jacobianTv f x v)

val hessian : (t -> t) -> t -> t

hessian of f : (vector -> scalar) at x.
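
For illustration, a sketch assuming the Owl.Algodiff.D instantiation and Owl.Mat for the input (both assumptions outside this signature):

open Owl.Algodiff.D

let f x = Maths.(sum' (x * x) + sum' (sin x))   (* f : vector -> scalar *)
let x = pack_arr (Owl.Mat.uniform 1 3)
let h = hessian f x                              (* the 3x3 Hessian of f at x *)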

val hessian' : (t -> t) -> t -> t * t

similar to hessian, but return (f x, hessian f x)

val hessianv : (t -> t) -> t -> t -> t

hessian vector product of f : (vector -> scalar) at x along v. Namely, it calculates (hessian f x) v.

val hessianv' : (t -> t) -> t -> t -> t * t

similar to hessianv, but return (f x, hessianv f x v).

val laplacian : (t -> t) -> t -> t

laplacian of f : (vector -> scalar) at x.

val laplacian' : (t -> t) -> t -> t * t

similar to laplacian, but return (f x, laplacian f x).

val gradhessian : (t -> t) -> t -> t * t

return (grad f x, hessian f x), f : (vector -> scalar)

val gradhessian' : (t -> t) -> t -> t * t * t

return (f x, grad f x, hessian f x)

val gradhessianv : (t -> t) -> t -> t -> t * t

return (grad f x v, hessian f x v)

val gradhessianv' : (t -> t) -> t -> t -> t * t * t

return (f x, grad f x v, hessian f x v)

include Owl_algodiff_ops_sig.Sig with type t := t and type elt := A.elt and type arr := A.arr and type op := op
module Builder : Owl_algodiff_ops_builder_sig.Sig with type t := t and type elt := A.elt and type arr := A.arr and type op := op
Supported Maths functions
module Maths : sig ... end
Supported Linalg functions
module Linalg : sig ... end
Supported Neural Network functions
module NN : sig ... end
Supported Mat functions
module Mat : sig ... end
Supported Arr functions
module Arr : sig ... end
Helper functions
include Owl_algodiff_graph_convert_sig.Sig with type t := t
val to_trace : t list -> string

to_trace [t0; t1; ...] outputs the trace of the computation graph rooted at the given nodes in a human-readable format.

val to_dot : t list -> string

to_dot [t0; t1; ...] outputs the trace of the computation graph in the dot file format, which you can pass to other tools such as Graphviz for further visualisation.
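
For illustration, a sketch of dumping a reverse-mode computation graph, assuming the Owl.Algodiff.D instantiation (an assumption outside this signature); only plain OCaml I/O is used to write the dot file:

open Owl.Algodiff.D

let x = make_reverse (pack_flt 2.) (tag ())      (* mark x for reverse-mode tracking *)
let y = Maths.(x * x + sin x)

let () =
  print_endline (to_trace [ y ]);                (* human-readable trace *)
  let oc = open_out "graph.dot" in               (* dot output for Graphviz *)
  output_string oc (to_dot [ y ]);
  close_out oc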

val pp_num : Stdlib.Format.formatter -> t -> unit

pp_num t pretty prints the abstract number used in Algodiff.

diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Batch/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Batch/index.html
deleted file mode 100644
Batch (owl.Owl_regression_generic_sig.Sig.Optimise.Batch)

Module Optimise.Batch

Batch module

type typ =
  1. | Full
  2. | Mini of int
  3. | Sample of int
  4. | Stochastic

Types of batches.

val run : typ -> Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t

Execute the computations defined in module typ.

val batches : typ -> Algodiff.t -> int

Return the total number of batches given a batch typ.

val to_string : typ -> string

Convert the module typ to its string representation.

diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Checkpoint/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Checkpoint/index.html
deleted file mode 100644
Checkpoint (owl.Owl_regression_generic_sig.Sig.Optimise.Checkpoint)

Module Optimise.Checkpoint

Checkpoint module

type state = {
  1. mutable current_batch : int;
  2. mutable batches_per_epoch : int;
  3. mutable epochs : float;
  4. mutable batches : int;
  5. mutable loss : Algodiff.t array;
  6. mutable start_at : float;
  7. mutable stop : bool;
  8. mutable gs : Algodiff.t array array;
  9. mutable ps : Algodiff.t array array;
  10. mutable us : Algodiff.t array array;
  11. mutable ch : Algodiff.t array array array;
}

Type definition of checkpoint

type typ =
  1. | Batch of int
  2. | Epoch of float
  3. | Custom of (state -> unit)
  4. | None

Batch type.

val init_state : int -> float -> state

init_state batches_per_epoch epochs initialises a state by specifying the number of batches per epoch and the number of epochs in total.

val default_checkpoint_fun : (string -> 'a) -> 'a

This function is used for saving intermediate files during optimisation.

val print_state_info : state -> unit

Print out the detailed information of the current state.

val print_summary : state -> unit

Print out the summary of current state.

val run : typ -> (string -> unit) -> int -> Algodiff.t -> state -> unit

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Clipping/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Clipping/index.html
deleted file mode 100644
Clipping (owl.Owl_regression_generic_sig.Sig.Optimise.Clipping)

Module Optimise.Clipping

Clipping module

type typ =
  1. | L2norm of float
  2. | Value of float * float
  3. | None

Types of clipping functions.

val run : typ -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Gradient/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Gradient/index.html
deleted file mode 100644
Gradient (owl.Owl_regression_generic_sig.Sig.Optimise.Gradient)

Module Optimise.Gradient

Gradient module

type typ =
  1. | GD
  2. | CG
  3. | CD
  4. | NonlinearCG
  5. | DaiYuanCG
  6. | NewtonCG
  7. | Newton

Types of gradient function.

val run : typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Learning_Rate/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Learning_Rate/index.html
deleted file mode 100644
Learning_Rate (owl.Owl_regression_generic_sig.Sig.Optimise.Learning_Rate)

Module Optimise.Learning_Rate

Strategies for learning rate update

type typ =
  1. | Adagrad of float
  2. | Const of float
  3. | Decay of float * float
  4. | Exp_decay of float * float
  5. | RMSprop of float * float
  6. | Adam of float * float * float
  7. | Schedule of float array

Representation of learning rate update strategies. Possible values include:

  • Adam (alpha, beta1, beta2); see the original Adam reference for the meaning of these parameters
val run : typ -> int -> Algodiff.t -> Algodiff.t array -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val update_ch : typ -> Algodiff.t -> Algodiff.t array -> Algodiff.t array

Update the cache of gradients.

val to_string : typ -> string

Convert the module typ to its string representation.

diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Loss/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Loss/index.html
deleted file mode 100644
Loss (owl.Owl_regression_generic_sig.Sig.Optimise.Loss)

Module Optimise.Loss

Loss module

type typ =
  1. | Hinge
  2. | L1norm
  3. | L2norm
  4. | Quadratic
  5. | Cross_entropy
  6. | Custom of (Algodiff.t -> Algodiff.t -> Algodiff.t)

Types of loss functions.
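
For illustration, a hedged sketch of the Custom constructor, assuming the Owl.Optimise.D instantiation of this Optimise signature (an assumption; any instantiation behaves the same):

open Owl.Optimise.D

(* mean absolute error between the two Algodiff inputs *)
let mae = Loss.Custom (fun y y' -> Algodiff.Maths.(mean (abs (y - y'))))

(* mae can then be passed as the ~loss argument of Params.config *)
let params = Params.config ~loss:mae 10.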

val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Momentum/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Momentum/index.html
deleted file mode 100644
Momentum (owl.Owl_regression_generic_sig.Sig.Optimise.Momentum)

Module Optimise.Momentum

Momentum module

type typ =
  1. | Standard of float
  2. | Nesterov of float
  3. | None

Types of momentum functions.

val run : typ -> Algodiff.t -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Params/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Params/index.html
deleted file mode 100644
Params (owl.Owl_regression_generic_sig.Sig.Optimise.Params)

Module Optimise.Params

Params module

type typ = {
  1. mutable epochs : float;
  2. mutable batch : Batch.typ;
  3. mutable gradient : Gradient.typ;
  4. mutable loss : Loss.typ;
  5. mutable learning_rate : Learning_Rate.typ;
  6. mutable regularisation : Regularisation.typ;
  7. mutable momentum : Momentum.typ;
  8. mutable clipping : Clipping.typ;
  9. mutable stopping : Stopping.typ;
  10. mutable checkpoint : Checkpoint.typ;
  11. mutable verbosity : bool;
}

Type definition of parameter.

val default : unit -> typ

Create module typ with default values.

val config : ?batch:Batch.typ -> ?gradient:Gradient.typ -> ?loss:Loss.typ -> ?learning_rate:Learning_Rate.typ -> ?regularisation:Regularisation.typ -> ?momentum:Momentum.typ -> ?clipping:Clipping.typ -> ?stopping:Stopping.typ -> ?checkpoint:Checkpoint.typ -> ?verbosity:bool -> float -> typ

This function creates a parameter record from the given optional configurations; the final float argument sets the total number of epochs.
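
For illustration, a sketch of building a parameter record, assuming the Owl.Optimise.D instantiation of this Optimise signature (an assumption; the constructors used are the ones documented on the pages above):

open Owl.Optimise.D

let params =
  Params.config
    ~batch:(Batch.Mini 16)
    ~learning_rate:(Learning_Rate.Adam (0.001, 0.9, 0.999))
    ~loss:Loss.Quadratic
    ~stopping:(Stopping.Const 1e-6)
    50.                                    (* epochs *)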

val to_string : typ -> string

Convert the module typ to its string representation.

diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Regularisation/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Regularisation/index.html
deleted file mode 100644
Regularisation (owl.Owl_regression_generic_sig.Sig.Optimise.Regularisation)

Module Optimise.Regularisation

Regularisation module

type typ =
  1. | L1norm of float
  2. | L2norm of float
  3. | Elastic_net of float * float
  4. | None

Types of regularisation functions.

val run : typ -> Algodiff.t -> Algodiff.t

Execute the computations defined in module typ.

val to_string : typ -> string

Convert the module typ to its string representation.

diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Stopping/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Stopping/index.html
deleted file mode 100644
Stopping (owl.Owl_regression_generic_sig.Sig.Optimise.Stopping)

Module Optimise.Stopping

Stopping module

type typ =
  1. | Const of float
  2. | Early of int * int
  3. | None

Types of stopping functions.

val run : typ -> float -> bool

Execute the computations defined in module typ.

val default : typ -> typ

Create module typ with default values.

val to_string : typ -> string

Convert the module typ to its string representation.

diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Utils/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/Utils/index.html
deleted file mode 100644
Utils (owl.Owl_regression_generic_sig.Sig.Optimise.Utils)

Module Optimise.Utils

Utils module

val sample_num : Algodiff.t -> int

Return the total number of samples in the passed-in ndarray.

val draw_samples : Algodiff.t -> Algodiff.t -> int -> Algodiff.t * Algodiff.t

draw_samples x y draws samples from both x (observations) and y (labels). The samples are drawn along axis 0, so x and y must agree along axis 0.
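
For illustration, a sketch assuming the Owl.Optimise.D instantiation and Owl.Mat for the data (both assumptions outside this signature); it also shows get_chunk, documented below:

open Owl.Optimise.D

let x = Algodiff.pack_arr (Owl.Mat.uniform 1000 10)   (* observations, one sample per row *)
let y = Algodiff.pack_arr (Owl.Mat.uniform 1000 1)    (* labels *)

let xs, ys = Utils.draw_samples x y 32                (* 32 randomly drawn samples *)
let xc, yc = Utils.get_chunk x y 100 32               (* 32 consecutive samples starting at row 100 *)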

val get_chunk : Algodiff.t -> Algodiff.t -> int -> int -> Algodiff.t * Algodiff.t

get_chunk x y i c gets a continuous chunk of c samples from position i from x (observations) and y (labels).

diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/Optimise/index.html
deleted file mode 100644
Optimise (owl.Owl_regression_generic_sig.Sig.Optimise)

Module Sig.Optimise

module Utils : sig ... end

Utils module

module Learning_Rate : sig ... end

Strategies for learning rate update

module Batch : sig ... end

Batch module

module Loss : sig ... end

Loss module

module Gradient : sig ... end

Gradient module

module Momentum : sig ... end

Momentum module

module Regularisation : sig ... end

Regularisation module

module Clipping : sig ... end

Clipping module

module Stopping : sig ... end

Stopping module

module Checkpoint : sig ... end

Checkpoint module

module Params : sig ... end

Params module

Core functions
val minimise_weight : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> Algodiff.t -> Algodiff.t -> Algodiff.t -> Checkpoint.state * Algodiff.t

This function minimises the weight w of the passed-in function f.

  • f is a function f : w -> x -> y.
  • w is a row vector, but y can have any shape.
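
For illustration, a hedged sketch of fitting a linear model with minimise_weight, assuming the Owl.Optimise.D instantiation and Owl.Mat for the data (both assumptions outside this signature):

open Owl
open Owl.Optimise.D

let x = Algodiff.pack_arr (Mat.uniform 100 3)             (* observations *)
let y = Algodiff.pack_arr (Mat.uniform 100 1)             (* targets *)

(* f : w -> x -> y, where w is a 1x3 row vector of weights *)
let f w x = Algodiff.Maths.(x *@ transpose w)

let params = Params.config ~loss:Loss.Quadratic 100.
let _state, w = minimise_weight params f (Algodiff.pack_arr (Mat.zeros 1 3)) x y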

val minimise_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t * Algodiff.t array array) -> (Algodiff.t -> Algodiff.t array array * Algodiff.t array array) -> (Algodiff.t array array -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state

This function is specifically designed for minimising the weights in a neural network of graph structure. In Owl's earlier versions, the functions in the regression module were actually implemented using this function.

val minimise_fun : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t) -> Algodiff.t -> Checkpoint.state * Algodiff.t

This function minimises f : x -> y w.r.t x.

x is an ndarray, and y is a scalar value.
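
For illustration, a sketch assuming the Owl.Optimise.D instantiation and Owl.Mat for the initial point (both assumptions outside this signature):

open Owl.Optimise.D

let f x = Algodiff.Maths.(l2norm_sqr' x)                 (* x -> scalar *)
let x0 = Algodiff.pack_arr (Owl.Mat.uniform 1 5)

let params = Params.config ~learning_rate:(Learning_Rate.Const 0.1) 50.
let _state, x_min = minimise_fun params f x0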

val minimise_compiled_network : ?state:Checkpoint.state -> Params.typ -> (Algodiff.t -> Algodiff.t -> Algodiff.t) -> (unit -> unit) -> (string -> unit) -> Algodiff.t -> Algodiff.t -> Checkpoint.state

TODO

diff --git a/owl/Owl_regression_generic_sig/module-type-Sig/index.html b/owl/Owl_regression_generic_sig/module-type-Sig/index.html
deleted file mode 100644
Sig (owl.Owl_regression_generic_sig.Sig)

Module type Owl_regression_generic_sig.Sig

Type definition

type arr

Type of ndarray values.

type elt

Type of scalar values.

Regression models
val ols : ?i:bool -> arr -> arr -> arr array

TODO
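
For illustration, a hedged sketch of ordinary least squares, assuming the concrete instantiation Owl.Regression.D of this signature and Owl.Mat for the data (both assumptions outside this page):

open Owl

let x = Mat.uniform 50 3                                 (* features *)
let p = Mat.of_array [| 1.5; -2.0; 0.5 |] 3 1            (* true coefficients *)
let y = Mat.(x *@ p)

let ws = Regression.D.ols ~i:false x y                   (* array of fitted parameter matrices *)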

val ridge : ?i:bool -> ?alpha:float -> arr -> arr -> arr array

TODO

val lasso : ?i:bool -> ?alpha:float -> arr -> arr -> arr array

TODO

val elastic_net : ?i:bool -> ?alpha:float -> ?l1_ratio:float -> arr -> arr -> arr array

TODO

val svm : ?i:bool -> ?a:float -> arr -> arr -> arr array

TODO

val logistic : ?i:bool -> arr -> arr -> arr array

TODO

val exponential : ?i:bool -> arr -> arr -> elt * elt * elt

TODO

val poly : arr -> arr -> int -> arr

TODO

diff --git a/owl/Owl_signal/.dummy b/owl/Owl_signal/.dummy
deleted file mode 100644
diff --git a/owl/Owl_slicing/.dummy b/owl/Owl_slicing/.dummy
deleted file mode 100644
diff --git a/owl/Owl_slicing_basic/.dummy b/owl/Owl_slicing_basic/.dummy
deleted file mode 100644
diff --git a/owl/Owl_slicing_fancy/.dummy b/owl/Owl_slicing_fancy/.dummy
deleted file mode 100644
diff --git a/owl/Owl_stats/.dummy b/owl/Owl_stats/.dummy
deleted file mode 100644
diff --git a/owl/Owl_stats_dist/.dummy b/owl/Owl_stats_dist/.dummy
deleted file mode 100644
diff --git a/owl/Owl_stats_extend/.dummy b/owl/Owl_stats_extend/.dummy
deleted file mode 100644
diff --git a/owl/Owl_stats_prng/.dummy b/owl/Owl_stats_prng/.dummy
deleted file mode 100644
diff --git a/owl/Owl_stats_sampler/.dummy b/owl/Owl_stats_sampler/.dummy
deleted file mode 100644